Mirror of https://github.com/yt-dlp/yt-dlp.git (synced 2026-01-18 04:41:15 +00:00)

Compare commits: 2022.04.08 ... 2024.05.26 (2100 commits)
(Commit table omitted: the compare view lists 2100 commits, from ed274b60b1 through 2d2b5493ee; the Author, Date, and commit-message columns were not captured in this export, so only the bare SHAs survived.)
.github/ISSUE_TEMPLATE/1_broken_site.yml (vendored) | 70
@@ -1,7 +1,14 @@
-name: Broken site
-description: Report broken or misfunctioning site
+name: Broken site support
+description: Report issue with yt-dlp on a supported site
 labels: [triage, site-bug]
 body:
+  - type: checkboxes
+    attributes:
+      label: DO NOT REMOVE OR SKIP THE ISSUE TEMPLATE
+      description: Fill all fields even if you think it is irrelevant for the issue
+      options:
+        - label: I understand that I will be **blocked** if I *intentionally* remove or skip any mandatory\* field
+          required: true
   - type: checkboxes
     id: checklist
     attributes:
@@ -9,15 +16,15 @@ body:
       description: |
         Carefully read and work through this check list in order to prevent the most common mistakes and misuse of yt-dlp:
       options:
-        - label: I'm reporting a broken site
+        - label: I'm reporting that yt-dlp is broken on a **supported** site
           required: true
-        - label: I've verified that I'm running yt-dlp version **2022.04.08** ([update instructions](https://github.com/yt-dlp/yt-dlp#update)) or later (specify commit)
+        - label: I've verified that I have **updated yt-dlp to nightly or master** ([update instructions](https://github.com/yt-dlp/yt-dlp#update-channels))
           required: true
-        - label: I've checked that all provided URLs are alive and playable in a browser
+        - label: I've checked that all provided URLs are playable in a browser with the same IP and same login details
           required: true
-        - label: I've checked that all URLs and arguments with special characters are [properly quoted or escaped](https://github.com/ytdl-org/youtube-dl#video-url-contains-an-ampersand-and-im-getting-some-strange-output-1-2839-or-v-is-not-recognized-as-an-internal-or-external-command)
+        - label: I've checked that all URLs and arguments with special characters are [properly quoted or escaped](https://github.com/yt-dlp/yt-dlp/wiki/FAQ#video-url-contains-an-ampersand--and-im-getting-some-strange-output-1-2839-or-v-is-not-recognized-as-an-internal-or-external-command)
           required: true
-        - label: I've searched the [bugtracker](https://github.com/yt-dlp/yt-dlp/issues?q=) for similar issues including closed ones. DO NOT post duplicates
+        - label: I've searched [known issues](https://github.com/yt-dlp/yt-dlp/issues/3766) and the [bugtracker](https://github.com/yt-dlp/yt-dlp/issues?q=) for similar issues **including closed ones**. DO NOT post duplicates
           required: true
         - label: I've read the [guidelines for opening an issue](https://github.com/yt-dlp/yt-dlp/blob/master/CONTRIBUTING.md#opening-an-issue)
           required: true
@@ -26,37 +33,46 @@ body:
     id: region
     attributes:
       label: Region
-      description: "Enter the region the site is accessible from"
-      placeholder: "India"
+      description: Enter the country/region that the site is accessible from
+      placeholder: India
   - type: textarea
     id: description
     attributes:
-      label: Description
-      description: |
-        Provide an explanation of your issue in an arbitrary form.
-        Provide any additional information, any suggested solutions, and as much context and examples as possible
-      placeholder: WRITE DESCRIPTION HERE
+      label: Provide a description that is worded well enough to be understood
+      description: See [is-the-description-of-the-issue-itself-sufficient](https://github.com/yt-dlp/yt-dlp/blob/master/CONTRIBUTING.md#is-the-description-of-the-issue-itself-sufficient)
+      placeholder: Provide any additional information, any suggested solutions, and as much context and examples as possible
     validations:
       required: true
+  - type: checkboxes
+    id: verbose
+    attributes:
+      label: Provide verbose output that clearly demonstrates the problem
+      options:
+        - label: Run **your** yt-dlp command with **-vU** flag added (`yt-dlp -vU <your command line>`)
+          required: true
+        - label: "If using API, add `'verbose': True` to `YoutubeDL` params instead"
+          required: false
+        - label: Copy the WHOLE output (starting with `[debug] Command-line config`) and insert it below
+          required: true
   - type: textarea
     id: log
     attributes:
-      label: Verbose log
+      label: Complete Verbose Output
       description: |
-        Provide the complete verbose output of yt-dlp **that clearly demonstrates the problem**.
-        Add the `-vU` flag to your command line you run yt-dlp with (`yt-dlp -vU <your command line>`), copy the WHOLE output and insert it below.
-        It should look similar to this:
+        It should start like this:
       placeholder: |
-        [debug] Command-line config: ['-vU', 'http://www.youtube.com/watch?v=BaW_jenozKc']
-        [debug] Portable config file: yt-dlp.conf
-        [debug] Portable config: ['-i']
-        [debug] Encodings: locale cp1252, fs utf-8, stdout utf-8, stderr utf-8, pref cp1252
-        [debug] yt-dlp version 2022.04.08 (exe)
-        [debug] Python version 3.8.8 (CPython 64bit) - Windows-10-10.0.19041-SP0
-        [debug] exe versions: ffmpeg 3.0.1, ffprobe 3.0.1
-        [debug] Optional libraries: Cryptodome, keyring, mutagen, sqlite, websockets
+        [debug] Command-line config: ['-vU', 'https://www.youtube.com/watch?v=BaW_jenozKc']
+        [debug] Encodings: locale cp65001, fs utf-8, pref cp65001, out utf-8, error utf-8, screen utf-8
+        [debug] yt-dlp version nightly@... from yt-dlp/yt-dlp [b634ba742] (win_exe)
+        [debug] Python 3.8.10 (CPython 64bit) - Windows-10-10.0.22000-SP0
+        [debug] exe versions: ffmpeg N-106550-g072101bd52-20220410 (fdk,setts), ffprobe N-106624-g391ce570c8-20220415, phantomjs 2.1.1
+        [debug] Optional libraries: Cryptodome-3.15.0, brotli-1.0.9, certifi-2022.06.15, mutagen-1.45.1, sqlite3-2.6.0, websockets-10.3
         [debug] Proxy map: {}
-        yt-dlp is up to date (2022.04.08)
+        [debug] Request Handlers: urllib, requests
+        [debug] Loaded 1893 extractors
+        [debug] Fetching release info: https://api.github.com/repos/yt-dlp/yt-dlp-nightly-builds/releases/latest
+        yt-dlp is up to date (nightly@... from yt-dlp/yt-dlp-nightly-builds)
+        [youtube] Extracting URL: https://www.youtube.com/watch?v=BaW_jenozKc
         <more lines>
       render: shell
     validations:
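The "If using API" checkbox added above refers to yt-dlp's Python embedding interface. As a minimal illustrative sketch (not part of the diff; the URL is just the example already used in the template placeholder), enabling verbose output from Python instead of the -vU command-line flag looks like this:

    # Sketch: request verbose output when embedding yt-dlp via its Python API
    from yt_dlp import YoutubeDL

    # 'verbose': True in the params dict plays the role of -v on the command line
    with YoutubeDL({'verbose': True}) as ydl:
        ydl.download(['https://www.youtube.com/watch?v=BaW_jenozKc'])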
@@ -2,6 +2,13 @@ name: Site support request
 description: Request support for a new site
 labels: [triage, site-request]
 body:
+  - type: checkboxes
+    attributes:
+      label: DO NOT REMOVE OR SKIP THE ISSUE TEMPLATE
+      description: Fill all fields even if you think it is irrelevant for the issue
+      options:
+        - label: I understand that I will be **blocked** if I *intentionally* remove or skip any mandatory\* field
+          required: true
   - type: checkboxes
     id: checklist
     attributes:
@@ -11,13 +18,13 @@ body:
       options:
         - label: I'm reporting a new site support request
           required: true
-        - label: I've verified that I'm running yt-dlp version **2022.04.08** ([update instructions](https://github.com/yt-dlp/yt-dlp#update)) or later (specify commit)
+        - label: I've verified that I have **updated yt-dlp to nightly or master** ([update instructions](https://github.com/yt-dlp/yt-dlp#update-channels))
           required: true
-        - label: I've checked that all provided URLs are alive and playable in a browser
+        - label: I've checked that all provided URLs are playable in a browser with the same IP and same login details
           required: true
-        - label: I've checked that none of provided URLs [violate any copyrights](https://github.com/ytdl-org/youtube-dl#can-you-add-support-for-this-anime-video-site-or-site-which-shows-current-movies-for-free) or contain any [DRM](https://en.wikipedia.org/wiki/Digital_rights_management) to the best of my knowledge
+        - label: I've checked that none of provided URLs [violate any copyrights](https://github.com/yt-dlp/yt-dlp/blob/master/CONTRIBUTING.md#is-the-website-primarily-used-for-piracy) or contain any [DRM](https://en.wikipedia.org/wiki/Digital_rights_management) to the best of my knowledge
           required: true
-        - label: I've searched the [bugtracker](https://github.com/yt-dlp/yt-dlp/issues?q=) for similar issues including closed ones. DO NOT post duplicates
+        - label: I've searched [known issues](https://github.com/yt-dlp/yt-dlp/issues/3766) and the [bugtracker](https://github.com/yt-dlp/yt-dlp/issues?q=) for similar issues **including closed ones**. DO NOT post duplicates
           required: true
         - label: I've read the [guidelines for opening an issue](https://github.com/yt-dlp/yt-dlp/blob/master/CONTRIBUTING.md#opening-an-issue)
           required: true
@@ -26,8 +33,8 @@ body:
     id: region
     attributes:
       label: Region
-      description: "Enter the region the site is accessible from"
-      placeholder: "India"
+      description: Enter the country/region that the site is accessible from
+      placeholder: India
   - type: textarea
     id: example-urls
     attributes:
@@ -43,31 +50,41 @@ body:
   - type: textarea
     id: description
     attributes:
-      label: Description
-      description: |
-        Provide any additional information
-      placeholder: WRITE DESCRIPTION HERE
+      label: Provide a description that is worded well enough to be understood
+      description: See [is-the-description-of-the-issue-itself-sufficient](https://github.com/yt-dlp/yt-dlp/blob/master/CONTRIBUTING.md#is-the-description-of-the-issue-itself-sufficient)
+      placeholder: Provide any additional information, any suggested solutions, and as much context and examples as possible
     validations:
       required: true
+  - type: checkboxes
+    id: verbose
+    attributes:
+      label: Provide verbose output that clearly demonstrates the problem
+      options:
+        - label: Run **your** yt-dlp command with **-vU** flag added (`yt-dlp -vU <your command line>`)
+          required: true
+        - label: "If using API, add `'verbose': True` to `YoutubeDL` params instead"
+          required: false
+        - label: Copy the WHOLE output (starting with `[debug] Command-line config`) and insert it below
+          required: true
   - type: textarea
     id: log
     attributes:
-      label: Verbose log
+      label: Complete Verbose Output
       description: |
-        Provide the complete verbose output **using one of the example URLs provided above**.
-        Add the `-vU` flag to your command line you run yt-dlp with (`yt-dlp -vU <your command line>`), copy the WHOLE output and insert it below.
-        It should look similar to this:
+        It should start like this:
       placeholder: |
-        [debug] Command-line config: ['-vU', 'http://www.youtube.com/watch?v=BaW_jenozKc']
-        [debug] Portable config file: yt-dlp.conf
-        [debug] Portable config: ['-i']
-        [debug] Encodings: locale cp1252, fs utf-8, stdout utf-8, stderr utf-8, pref cp1252
-        [debug] yt-dlp version 2022.04.08 (exe)
-        [debug] Python version 3.8.8 (CPython 64bit) - Windows-10-10.0.19041-SP0
-        [debug] exe versions: ffmpeg 3.0.1, ffprobe 3.0.1
-        [debug] Optional libraries: Cryptodome, keyring, mutagen, sqlite, websockets
+        [debug] Command-line config: ['-vU', 'https://www.youtube.com/watch?v=BaW_jenozKc']
+        [debug] Encodings: locale cp65001, fs utf-8, pref cp65001, out utf-8, error utf-8, screen utf-8
+        [debug] yt-dlp version nightly@... from yt-dlp/yt-dlp [b634ba742] (win_exe)
+        [debug] Python 3.8.10 (CPython 64bit) - Windows-10-10.0.22000-SP0
+        [debug] exe versions: ffmpeg N-106550-g072101bd52-20220410 (fdk,setts), ffprobe N-106624-g391ce570c8-20220415, phantomjs 2.1.1
+        [debug] Optional libraries: Cryptodome-3.15.0, brotli-1.0.9, certifi-2022.06.15, mutagen-1.45.1, sqlite3-2.6.0, websockets-10.3
         [debug] Proxy map: {}
-        yt-dlp is up to date (2022.04.08)
+        [debug] Request Handlers: urllib, requests
+        [debug] Loaded 1893 extractors
+        [debug] Fetching release info: https://api.github.com/repos/yt-dlp/yt-dlp-nightly-builds/releases/latest
+        yt-dlp is up to date (nightly@... from yt-dlp/yt-dlp-nightly-builds)
+        [youtube] Extracting URL: https://www.youtube.com/watch?v=BaW_jenozKc
         <more lines>
       render: shell
     validations:
@@ -2,6 +2,13 @@ name: Site feature request
 description: Request a new functionality for a supported site
 labels: [triage, site-enhancement]
 body:
+  - type: checkboxes
+    attributes:
+      label: DO NOT REMOVE OR SKIP THE ISSUE TEMPLATE
+      description: Fill all fields even if you think it is irrelevant for the issue
+      options:
+        - label: I understand that I will be **blocked** if I *intentionally* remove or skip any mandatory\* field
+          required: true
   - type: checkboxes
     id: checklist
     attributes:
@@ -9,13 +16,13 @@ body:
|
|||||||
description: |
|
description: |
|
||||||
Carefully read and work through this check list in order to prevent the most common mistakes and misuse of yt-dlp:
|
Carefully read and work through this check list in order to prevent the most common mistakes and misuse of yt-dlp:
|
||||||
options:
|
options:
|
||||||
- label: I'm reporting a site feature request
|
- label: I'm requesting a site-specific feature
|
||||||
required: true
|
required: true
|
||||||
- label: I've verified that I'm running yt-dlp version **2022.04.08** ([update instructions](https://github.com/yt-dlp/yt-dlp#update)) or later (specify commit)
|
- label: I've verified that I have **updated yt-dlp to nightly or master** ([update instructions](https://github.com/yt-dlp/yt-dlp#update-channels))
|
||||||
required: true
|
required: true
|
||||||
- label: I've checked that all provided URLs are alive and playable in a browser
|
- label: I've checked that all provided URLs are playable in a browser with the same IP and same login details
|
||||||
required: true
|
required: true
|
||||||
- label: I've searched the [bugtracker](https://github.com/yt-dlp/yt-dlp/issues?q=) for similar issues including closed ones. DO NOT post duplicates
|
- label: I've searched [known issues](https://github.com/yt-dlp/yt-dlp/issues/3766) and the [bugtracker](https://github.com/yt-dlp/yt-dlp/issues?q=) for similar issues **including closed ones**. DO NOT post duplicates
|
||||||
required: true
|
required: true
|
||||||
- label: I've read the [guidelines for opening an issue](https://github.com/yt-dlp/yt-dlp/blob/master/CONTRIBUTING.md#opening-an-issue)
|
- label: I've read the [guidelines for opening an issue](https://github.com/yt-dlp/yt-dlp/blob/master/CONTRIBUTING.md#opening-an-issue)
|
||||||
required: true
|
required: true
|
||||||
@@ -24,8 +31,8 @@ body:
|
|||||||
id: region
|
id: region
|
||||||
attributes:
|
attributes:
|
||||||
label: Region
|
label: Region
|
||||||
description: "Enter the region the site is accessible from"
|
description: Enter the country/region that the site is accessible from
|
||||||
placeholder: "India"
|
placeholder: India
|
||||||
- type: textarea
|
- type: textarea
|
||||||
id: example-urls
|
id: example-urls
|
||||||
attributes:
|
attributes:
|
||||||
@@ -39,33 +46,41 @@ body:
|
|||||||
- type: textarea
|
- type: textarea
|
||||||
id: description
|
id: description
|
||||||
attributes:
|
attributes:
|
||||||
label: Description
|
label: Provide a description that is worded well enough to be understood
|
||||||
description: |
|
description: See [is-the-description-of-the-issue-itself-sufficient](https://github.com/yt-dlp/yt-dlp/blob/master/CONTRIBUTING.md#is-the-description-of-the-issue-itself-sufficient)
|
||||||
Provide an explanation of your site feature request in an arbitrary form.
|
placeholder: Provide any additional information, any suggested solutions, and as much context and examples as possible
|
||||||
Please make sure the description is worded well enough to be understood, see [is-the-description-of-the-issue-itself-sufficient](https://github.com/ytdl-org/youtube-dl#is-the-description-of-the-issue-itself-sufficient).
|
|
||||||
Provide any additional information, any suggested solutions, and as much context and examples as possible
|
|
||||||
placeholder: WRITE DESCRIPTION HERE
|
|
||||||
validations:
|
validations:
|
||||||
required: true
|
required: true
|
||||||
|
- type: checkboxes
|
||||||
|
id: verbose
|
||||||
|
attributes:
|
||||||
|
label: Provide verbose output that clearly demonstrates the problem
|
||||||
|
options:
|
||||||
|
- label: Run **your** yt-dlp command with **-vU** flag added (`yt-dlp -vU <your command line>`)
|
||||||
|
required: true
|
||||||
|
- label: "If using API, add `'verbose': True` to `YoutubeDL` params instead"
|
||||||
|
required: false
|
||||||
|
- label: Copy the WHOLE output (starting with `[debug] Command-line config`) and insert it below
|
||||||
|
required: true
|
||||||
- type: textarea
|
- type: textarea
|
||||||
id: log
|
id: log
|
||||||
attributes:
|
attributes:
|
||||||
label: Verbose log
|
label: Complete Verbose Output
|
||||||
description: |
|
description: |
|
||||||
Provide the complete verbose output of yt-dlp that demonstrates the need for the enhancement.
|
It should start like this:
|
||||||
Add the `-vU` flag to your command line you run yt-dlp with (`yt-dlp -vU <your command line>`), copy the WHOLE output and insert it below.
|
|
||||||
It should look similar to this:
|
|
||||||
placeholder: |
|
placeholder: |
|
||||||
[debug] Command-line config: ['-vU', 'http://www.youtube.com/watch?v=BaW_jenozKc']
|
[debug] Command-line config: ['-vU', 'https://www.youtube.com/watch?v=BaW_jenozKc']
|
||||||
[debug] Portable config file: yt-dlp.conf
|
[debug] Encodings: locale cp65001, fs utf-8, pref cp65001, out utf-8, error utf-8, screen utf-8
|
||||||
[debug] Portable config: ['-i']
|
[debug] yt-dlp version nightly@... from yt-dlp/yt-dlp [b634ba742] (win_exe)
|
||||||
[debug] Encodings: locale cp1252, fs utf-8, stdout utf-8, stderr utf-8, pref cp1252
|
[debug] Python 3.8.10 (CPython 64bit) - Windows-10-10.0.22000-SP0
|
||||||
[debug] yt-dlp version 2022.04.08 (exe)
|
[debug] exe versions: ffmpeg N-106550-g072101bd52-20220410 (fdk,setts), ffprobe N-106624-g391ce570c8-20220415, phantomjs 2.1.1
|
||||||
[debug] Python version 3.8.8 (CPython 64bit) - Windows-10-10.0.19041-SP0
|
[debug] Optional libraries: Cryptodome-3.15.0, brotli-1.0.9, certifi-2022.06.15, mutagen-1.45.1, sqlite3-2.6.0, websockets-10.3
|
||||||
[debug] exe versions: ffmpeg 3.0.1, ffprobe 3.0.1
|
|
||||||
[debug] Optional libraries: Cryptodome, keyring, mutagen, sqlite, websockets
|
|
||||||
[debug] Proxy map: {}
|
[debug] Proxy map: {}
|
||||||
yt-dlp is up to date (2022.04.08)
|
[debug] Request Handlers: urllib, requests
|
||||||
|
[debug] Loaded 1893 extractors
|
||||||
|
[debug] Fetching release info: https://api.github.com/repos/yt-dlp/yt-dlp-nightly-builds/releases/latest
|
||||||
|
yt-dlp is up to date (nightly@... from yt-dlp/yt-dlp-nightly-builds)
|
||||||
|
[youtube] Extracting URL: https://www.youtube.com/watch?v=BaW_jenozKc
|
||||||
<more lines>
|
<more lines>
|
||||||
render: shell
|
render: shell
|
||||||
validations:
|
validations:
|
||||||
|
|||||||

.github/ISSUE_TEMPLATE/4_bug_report.yml
@@ -1,7 +1,14 @@
-name: Bug report
+name: Core bug report
 description: Report a bug unrelated to any particular site or extractor
 labels: [triage, bug]
 body:
+- type: checkboxes
+attributes:
+label: DO NOT REMOVE OR SKIP THE ISSUE TEMPLATE
+description: Fill all fields even if you think it is irrelevant for the issue
+options:
+- label: I understand that I will be **blocked** if I *intentionally* remove or skip any mandatory\* field
+required: true
 - type: checkboxes
 id: checklist
 attributes:
@@ -11,46 +18,54 @@ body:
 options:
 - label: I'm reporting a bug unrelated to a specific site
 required: true
-- label: I've verified that I'm running yt-dlp version **2022.04.08** ([update instructions](https://github.com/yt-dlp/yt-dlp#update)) or later (specify commit)
+- label: I've verified that I have **updated yt-dlp to nightly or master** ([update instructions](https://github.com/yt-dlp/yt-dlp#update-channels))
 required: true
-- label: I've checked that all provided URLs are alive and playable in a browser
+- label: I've checked that all provided URLs are playable in a browser with the same IP and same login details
 required: true
-- label: I've checked that all URLs and arguments with special characters are [properly quoted or escaped](https://github.com/ytdl-org/youtube-dl#video-url-contains-an-ampersand-and-im-getting-some-strange-output-1-2839-or-v-is-not-recognized-as-an-internal-or-external-command)
+- label: I've checked that all URLs and arguments with special characters are [properly quoted or escaped](https://github.com/yt-dlp/yt-dlp/wiki/FAQ#video-url-contains-an-ampersand--and-im-getting-some-strange-output-1-2839-or-v-is-not-recognized-as-an-internal-or-external-command)
 required: true
-- label: I've searched the [bugtracker](https://github.com/yt-dlp/yt-dlp/issues?q=) for similar issues including closed ones. DO NOT post duplicates
+- label: I've searched [known issues](https://github.com/yt-dlp/yt-dlp/issues/3766) and the [bugtracker](https://github.com/yt-dlp/yt-dlp/issues?q=) for similar issues **including closed ones**. DO NOT post duplicates
 required: true
 - label: I've read the [guidelines for opening an issue](https://github.com/yt-dlp/yt-dlp/blob/master/CONTRIBUTING.md#opening-an-issue)
 required: true
 - type: textarea
 id: description
 attributes:
-label: Description
+label: Provide a description that is worded well enough to be understood
-description: |
+description: See [is-the-description-of-the-issue-itself-sufficient](https://github.com/yt-dlp/yt-dlp/blob/master/CONTRIBUTING.md#is-the-description-of-the-issue-itself-sufficient)
-Provide an explanation of your issue in an arbitrary form.
+placeholder: Provide any additional information, any suggested solutions, and as much context and examples as possible
-Please make sure the description is worded well enough to be understood, see [is-the-description-of-the-issue-itself-sufficient](https://github.com/ytdl-org/youtube-dl#is-the-description-of-the-issue-itself-sufficient).
-Provide any additional information, any suggested solutions, and as much context and examples as possible
-placeholder: WRITE DESCRIPTION HERE
 validations:
 required: true
+- type: checkboxes
+id: verbose
+attributes:
+label: Provide verbose output that clearly demonstrates the problem
+options:
+- label: Run **your** yt-dlp command with **-vU** flag added (`yt-dlp -vU <your command line>`)
+required: true
+- label: "If using API, add `'verbose': True` to `YoutubeDL` params instead"
+required: false
+- label: Copy the WHOLE output (starting with `[debug] Command-line config`) and insert it below
+required: true
 - type: textarea
 id: log
 attributes:
-label: Verbose log
+label: Complete Verbose Output
 description: |
-Provide the complete verbose output of yt-dlp **that clearly demonstrates the problem**.
+It should start like this:
-Add the `-vU` flag to **your** command line you run yt-dlp with (`yt-dlp -vU <your command line>`), copy the WHOLE output and insert it below.
-It should look similar to this:
 placeholder: |
-[debug] Command-line config: ['-vU', 'http://www.youtube.com/watch?v=BaW_jenozKc']
+[debug] Command-line config: ['-vU', 'https://www.youtube.com/watch?v=BaW_jenozKc']
-[debug] Portable config file: yt-dlp.conf
+[debug] Encodings: locale cp65001, fs utf-8, pref cp65001, out utf-8, error utf-8, screen utf-8
-[debug] Portable config: ['-i']
+[debug] yt-dlp version nightly@... from yt-dlp/yt-dlp [b634ba742] (win_exe)
-[debug] Encodings: locale cp1252, fs utf-8, stdout utf-8, stderr utf-8, pref cp1252
+[debug] Python 3.8.10 (CPython 64bit) - Windows-10-10.0.22000-SP0
-[debug] yt-dlp version 2022.04.08 (exe)
+[debug] exe versions: ffmpeg N-106550-g072101bd52-20220410 (fdk,setts), ffprobe N-106624-g391ce570c8-20220415, phantomjs 2.1.1
-[debug] Python version 3.8.8 (CPython 64bit) - Windows-10-10.0.19041-SP0
+[debug] Optional libraries: Cryptodome-3.15.0, brotli-1.0.9, certifi-2022.06.15, mutagen-1.45.1, sqlite3-2.6.0, websockets-10.3
-[debug] exe versions: ffmpeg 3.0.1, ffprobe 3.0.1
-[debug] Optional libraries: Cryptodome, keyring, mutagen, sqlite, websockets
 [debug] Proxy map: {}
-yt-dlp is up to date (2022.04.08)
+[debug] Request Handlers: urllib, requests
+[debug] Loaded 1893 extractors
+[debug] Fetching release info: https://api.github.com/repos/yt-dlp/yt-dlp-nightly-builds/releases/latest
+yt-dlp is up to date (nightly@... from yt-dlp/yt-dlp-nightly-builds)
+[youtube] Extracting URL: https://www.youtube.com/watch?v=BaW_jenozKc
 <more lines>
 render: shell
 validations:

.github/ISSUE_TEMPLATE/5_feature_request.yml
@@ -2,6 +2,13 @@ name: Feature request
 description: Request a new functionality unrelated to any particular site or extractor
 labels: [triage, enhancement]
 body:
+- type: checkboxes
+attributes:
+label: DO NOT REMOVE OR SKIP THE ISSUE TEMPLATE
+description: Fill all fields even if you think it is irrelevant for the issue
+options:
+- label: I understand that I will be **blocked** if I *intentionally* remove or skip any mandatory\* field
+required: true
 - type: checkboxes
 id: checklist
 attributes:
@@ -9,45 +16,51 @@ body:
 description: |
 Carefully read and work through this check list in order to prevent the most common mistakes and misuse of yt-dlp:
 options:
-- label: I'm reporting a feature request
+- label: I'm requesting a feature unrelated to a specific site
 required: true
 - label: I've looked through the [README](https://github.com/yt-dlp/yt-dlp#readme)
 required: true
-- label: I've verified that I'm running yt-dlp version **2022.04.08** ([update instructions](https://github.com/yt-dlp/yt-dlp#update)) or later (specify commit)
+- label: I've verified that I have **updated yt-dlp to nightly or master** ([update instructions](https://github.com/yt-dlp/yt-dlp#update-channels))
 required: true
-- label: I've searched the [bugtracker](https://github.com/yt-dlp/yt-dlp/issues?q=) for similar issues including closed ones. DO NOT post duplicates
+- label: I've searched [known issues](https://github.com/yt-dlp/yt-dlp/issues/3766) and the [bugtracker](https://github.com/yt-dlp/yt-dlp/issues?q=) for similar issues **including closed ones**. DO NOT post duplicates
 required: true
 - label: I've read the [guidelines for opening an issue](https://github.com/yt-dlp/yt-dlp/blob/master/CONTRIBUTING.md#opening-an-issue)
 required: true
 - type: textarea
 id: description
 attributes:
-label: Description
+label: Provide a description that is worded well enough to be understood
-description: |
+description: See [is-the-description-of-the-issue-itself-sufficient](https://github.com/yt-dlp/yt-dlp/blob/master/CONTRIBUTING.md#is-the-description-of-the-issue-itself-sufficient)
-Provide an explanation of your site feature request in an arbitrary form.
+placeholder: Provide any additional information, any suggested solutions, and as much context and examples as possible
-Please make sure the description is worded well enough to be understood, see [is-the-description-of-the-issue-itself-sufficient](https://github.com/ytdl-org/youtube-dl#is-the-description-of-the-issue-itself-sufficient).
-Provide any additional information, any suggested solutions, and as much context and examples as possible
-placeholder: WRITE DESCRIPTION HERE
 validations:
 required: true
+- type: checkboxes
+id: verbose
+attributes:
+label: Provide verbose output that clearly demonstrates the problem
+options:
+- label: Run **your** yt-dlp command with **-vU** flag added (`yt-dlp -vU <your command line>`)
+- label: "If using API, add `'verbose': True` to `YoutubeDL` params instead"
+required: false
+- label: Copy the WHOLE output (starting with `[debug] Command-line config`) and insert it below
 - type: textarea
 id: log
 attributes:
-label: Verbose log
+label: Complete Verbose Output
 description: |
-If your feature request involves an existing yt-dlp command, provide the complete verbose output of that command.
+It should start like this:
-Add the `-vU` flag to **your** command line you run yt-dlp with (`yt-dlp -vU <your command line>`), copy the WHOLE output and insert it below.
-It should look similar to this:
 placeholder: |
-[debug] Command-line config: ['-vU', 'http://www.youtube.com/watch?v=BaW_jenozKc']
+[debug] Command-line config: ['-vU', 'https://www.youtube.com/watch?v=BaW_jenozKc']
-[debug] Portable config file: yt-dlp.conf
+[debug] Encodings: locale cp65001, fs utf-8, pref cp65001, out utf-8, error utf-8, screen utf-8
-[debug] Portable config: ['-i']
+[debug] yt-dlp version nightly@... from yt-dlp/yt-dlp [b634ba742] (win_exe)
-[debug] Encodings: locale cp1252, fs utf-8, stdout utf-8, stderr utf-8, pref cp1252
+[debug] Python 3.8.10 (CPython 64bit) - Windows-10-10.0.22000-SP0
-[debug] yt-dlp version 2021.12.01 (exe)
+[debug] exe versions: ffmpeg N-106550-g072101bd52-20220410 (fdk,setts), ffprobe N-106624-g391ce570c8-20220415, phantomjs 2.1.1
-[debug] Python version 3.8.8 (CPython 64bit) - Windows-10-10.0.19041-SP0
+[debug] Optional libraries: Cryptodome-3.15.0, brotli-1.0.9, certifi-2022.06.15, mutagen-1.45.1, sqlite3-2.6.0, websockets-10.3
-[debug] exe versions: ffmpeg 3.0.1, ffprobe 3.0.1
-[debug] Optional libraries: Cryptodome, keyring, mutagen, sqlite, websockets
 [debug] Proxy map: {}
-yt-dlp is up to date (2021.12.01)
+[debug] Request Handlers: urllib, requests
+[debug] Loaded 1893 extractors
+[debug] Fetching release info: https://api.github.com/repos/yt-dlp/yt-dlp-nightly-builds/releases/latest
+yt-dlp is up to date (nightly@... from yt-dlp/yt-dlp-nightly-builds)
+[youtube] Extracting URL: https://www.youtube.com/watch?v=BaW_jenozKc
 <more lines>
 render: shell

.github/ISSUE_TEMPLATE/6_question.yml
@@ -2,6 +2,19 @@ name: Ask question
 description: Ask yt-dlp related question
 labels: [question]
 body:
+- type: checkboxes
+attributes:
+label: DO NOT REMOVE OR SKIP THE ISSUE TEMPLATE
+description: Fill all fields even if you think it is irrelevant for the issue
+options:
+- label: I understand that I will be **blocked** if I *intentionally* remove or skip any mandatory\* field
+required: true
+- type: markdown
+attributes:
+value: |
+### Make sure you are **only** asking a question and not reporting a bug or requesting a feature.
+If your question contains "isn't working" or "can you add", this is most likely the wrong template.
+If you are in doubt whether this is the right template, **USE ANOTHER TEMPLATE**!
 - type: checkboxes
 id: checklist
 attributes:
@@ -9,45 +22,51 @@ body:
 description: |
 Carefully read and work through this check list in order to prevent the most common mistakes and misuse of yt-dlp:
 options:
-- label: I'm asking a question and **not** reporting a bug/feature request
+- label: I'm asking a question and **not** reporting a bug or requesting a feature
 required: true
 - label: I've looked through the [README](https://github.com/yt-dlp/yt-dlp#readme)
 required: true
-- label: I've read the [guidelines for opening an issue](https://github.com/yt-dlp/yt-dlp/blob/master/CONTRIBUTING.md#opening-an-issue)
+- label: I've verified that I have **updated yt-dlp to nightly or master** ([update instructions](https://github.com/yt-dlp/yt-dlp#update-channels))
 required: true
-- label: I've searched the [bugtracker](https://github.com/yt-dlp/yt-dlp/issues?q=) for similar questions including closed ones
+- label: I've searched [known issues](https://github.com/yt-dlp/yt-dlp/issues/3766) and the [bugtracker](https://github.com/yt-dlp/yt-dlp/issues?q=) for similar questions **including closed ones**. DO NOT post duplicates
+required: true
+- label: I've read the [guidelines for opening an issue](https://github.com/yt-dlp/yt-dlp/blob/master/CONTRIBUTING.md#opening-an-issue)
 required: true
 - type: textarea
 id: question
 attributes:
-label: Question
+label: Please make sure the question is worded well enough to be understood
-description: |
+description: See [is-the-description-of-the-issue-itself-sufficient](https://github.com/yt-dlp/yt-dlp/blob/master/CONTRIBUTING.md#is-the-description-of-the-issue-itself-sufficient)
-Ask your question in an arbitrary form.
+placeholder: Provide any additional information and as much context and examples as possible
-Please make sure it's worded well enough to be understood, see [is-the-description-of-the-issue-itself-sufficient](https://github.com/ytdl-org/youtube-dl#is-the-description-of-the-issue-itself-sufficient).
-Provide any additional information and as much context and examples as possible.
-If your question contains "isn't working" or "can you add", this is most likely the wrong template.
-If you are in doubt if this is the right template, use another template!
-placeholder: WRITE QUESTION HERE
 validations:
 required: true
+- type: checkboxes
+id: verbose
+attributes:
+label: Provide verbose output that clearly demonstrates the problem
+options:
+- label: Run **your** yt-dlp command with **-vU** flag added (`yt-dlp -vU <your command line>`)
+- label: "If using API, add `'verbose': True` to `YoutubeDL` params instead"
+required: false
+- label: Copy the WHOLE output (starting with `[debug] Command-line config`) and insert it below
 - type: textarea
 id: log
 attributes:
-label: Verbose log
+label: Complete Verbose Output
 description: |
-If your question involves a yt-dlp command, provide the complete verbose output of that command.
+It should start like this:
-Add the `-vU` flag to **your** command line you run yt-dlp with (`yt-dlp -vU <your command line>`), copy the WHOLE output and insert it below.
-It should look similar to this:
 placeholder: |
-[debug] Command-line config: ['-vU', 'http://www.youtube.com/watch?v=BaW_jenozKc']
+[debug] Command-line config: ['-vU', 'https://www.youtube.com/watch?v=BaW_jenozKc']
-[debug] Portable config file: yt-dlp.conf
+[debug] Encodings: locale cp65001, fs utf-8, pref cp65001, out utf-8, error utf-8, screen utf-8
-[debug] Portable config: ['-i']
+[debug] yt-dlp version nightly@... from yt-dlp/yt-dlp [b634ba742] (win_exe)
-[debug] Encodings: locale cp1252, fs utf-8, stdout utf-8, stderr utf-8, pref cp1252
+[debug] Python 3.8.10 (CPython 64bit) - Windows-10-10.0.22000-SP0
-[debug] yt-dlp version 2021.12.01 (exe)
+[debug] exe versions: ffmpeg N-106550-g072101bd52-20220410 (fdk,setts), ffprobe N-106624-g391ce570c8-20220415, phantomjs 2.1.1
-[debug] Python version 3.8.8 (CPython 64bit) - Windows-10-10.0.19041-SP0
+[debug] Optional libraries: Cryptodome-3.15.0, brotli-1.0.9, certifi-2022.06.15, mutagen-1.45.1, sqlite3-2.6.0, websockets-10.3
-[debug] exe versions: ffmpeg 3.0.1, ffprobe 3.0.1
-[debug] Optional libraries: Cryptodome, keyring, mutagen, sqlite, websockets
 [debug] Proxy map: {}
-yt-dlp is up to date (2021.12.01)
+[debug] Request Handlers: urllib, requests
+[debug] Loaded 1893 extractors
+[debug] Fetching release info: https://api.github.com/repos/yt-dlp/yt-dlp-nightly-builds/releases/latest
+yt-dlp is up to date (nightly@... from yt-dlp/yt-dlp-nightly-builds)
+[youtube] Extracting URL: https://www.youtube.com/watch?v=BaW_jenozKc
 <more lines>
 render: shell

.github/ISSUE_TEMPLATE_tmpl/1_broken_site.yml
@@ -1,7 +1,8 @@
-name: Broken site
+name: Broken site support
-description: Report broken or misfunctioning site
+description: Report issue with yt-dlp on a supported site
 labels: [triage, site-bug]
 body:
+%(no_skip)s
 - type: checkboxes
 id: checklist
 attributes:
@@ -9,15 +10,15 @@ body:
 description: |
 Carefully read and work through this check list in order to prevent the most common mistakes and misuse of yt-dlp:
 options:
-- label: I'm reporting a broken site
+- label: I'm reporting that yt-dlp is broken on a **supported** site
 required: true
-- label: I've verified that I'm running yt-dlp version **%(version)s** ([update instructions](https://github.com/yt-dlp/yt-dlp#update)) or later (specify commit)
+- label: I've verified that I have **updated yt-dlp to nightly or master** ([update instructions](https://github.com/yt-dlp/yt-dlp#update-channels))
 required: true
-- label: I've checked that all provided URLs are alive and playable in a browser
+- label: I've checked that all provided URLs are playable in a browser with the same IP and same login details
 required: true
-- label: I've checked that all URLs and arguments with special characters are [properly quoted or escaped](https://github.com/ytdl-org/youtube-dl#video-url-contains-an-ampersand-and-im-getting-some-strange-output-1-2839-or-v-is-not-recognized-as-an-internal-or-external-command)
+- label: I've checked that all URLs and arguments with special characters are [properly quoted or escaped](https://github.com/yt-dlp/yt-dlp/wiki/FAQ#video-url-contains-an-ampersand--and-im-getting-some-strange-output-1-2839-or-v-is-not-recognized-as-an-internal-or-external-command)
 required: true
-- label: I've searched the [bugtracker](https://github.com/yt-dlp/yt-dlp/issues?q=) for similar issues including closed ones. DO NOT post duplicates
+- label: I've searched [known issues](https://github.com/yt-dlp/yt-dlp/issues/3766) and the [bugtracker](https://github.com/yt-dlp/yt-dlp/issues?q=) for similar issues **including closed ones**. DO NOT post duplicates
 required: true
 - label: I've read the [guidelines for opening an issue](https://github.com/yt-dlp/yt-dlp/blob/master/CONTRIBUTING.md#opening-an-issue)
 required: true
@@ -26,38 +27,14 @@ body:
 id: region
 attributes:
 label: Region
-description: "Enter the region the site is accessible from"
+description: Enter the country/region that the site is accessible from
-placeholder: "India"
+placeholder: India
 - type: textarea
 id: description
 attributes:
-label: Description
+label: Provide a description that is worded well enough to be understood
-description: |
+description: See [is-the-description-of-the-issue-itself-sufficient](https://github.com/yt-dlp/yt-dlp/blob/master/CONTRIBUTING.md#is-the-description-of-the-issue-itself-sufficient)
-Provide an explanation of your issue in an arbitrary form.
+placeholder: Provide any additional information, any suggested solutions, and as much context and examples as possible
-Provide any additional information, any suggested solutions, and as much context and examples as possible
-placeholder: WRITE DESCRIPTION HERE
-validations:
-required: true
-- type: textarea
-id: log
-attributes:
-label: Verbose log
-description: |
-Provide the complete verbose output of yt-dlp **that clearly demonstrates the problem**.
-Add the `-vU` flag to your command line you run yt-dlp with (`yt-dlp -vU <your command line>`), copy the WHOLE output and insert it below.
-It should look similar to this:
-placeholder: |
-[debug] Command-line config: ['-vU', 'http://www.youtube.com/watch?v=BaW_jenozKc']
-[debug] Portable config file: yt-dlp.conf
-[debug] Portable config: ['-i']
-[debug] Encodings: locale cp1252, fs utf-8, stdout utf-8, stderr utf-8, pref cp1252
-[debug] yt-dlp version %(version)s (exe)
-[debug] Python version 3.8.8 (CPython 64bit) - Windows-10-10.0.19041-SP0
-[debug] exe versions: ffmpeg 3.0.1, ffprobe 3.0.1
-[debug] Optional libraries: Cryptodome, keyring, mutagen, sqlite, websockets
-[debug] Proxy map: {}
-yt-dlp is up to date (%(version)s)
-<more lines>
-render: shell
 validations:
 required: true
+%(verbose)s
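
The files under `.github/ISSUE_TEMPLATE_tmpl/` are not used directly: `%(no_skip)s`, `%(version)s`, `%(verbose)s` and, in the feature-request and question templates further down, `%(verbose_optional)s` are %-style placeholders that a generation script expands into the concrete templates under `.github/ISSUE_TEMPLATE/`. A minimal sketch of that expansion with purely illustrative values; the project's actual generation script may differ:

```python
# Illustrative sketch only -- not the project's real generation script.
# Shows how the %(...)s placeholders in the _tmpl file above could be expanded
# into the concrete template shipped in .github/ISSUE_TEMPLATE/.
fields = {
    'version': '2024.05.26',                          # hypothetical value for %(version)s
    'no_skip': '  - type: checkboxes\n    # ...\n',   # abbreviated "DO NOT REMOVE OR SKIP" block
    'verbose': '  - type: checkboxes\n    # ...\n',   # abbreviated verbose checklist + log textarea
    'verbose_optional': '  - type: checkboxes\n    # ...\n',
}

with open('.github/ISSUE_TEMPLATE_tmpl/1_broken_site.yml') as src:
    # %-style mapping substitution fills every %(name)s; any literal % in the
    # template text would need to be written as %% for this to work.
    rendered = src.read() % fields

with open('.github/ISSUE_TEMPLATE/1_broken_site.yml', 'w') as dst:
    dst.write(rendered)
```
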
@@ -2,6 +2,7 @@ name: Site support request
 description: Request support for a new site
 labels: [triage, site-request]
 body:
+%(no_skip)s
 - type: checkboxes
 id: checklist
 attributes:
@@ -11,13 +12,13 @@ body:
 options:
 - label: I'm reporting a new site support request
 required: true
-- label: I've verified that I'm running yt-dlp version **%(version)s** ([update instructions](https://github.com/yt-dlp/yt-dlp#update)) or later (specify commit)
+- label: I've verified that I have **updated yt-dlp to nightly or master** ([update instructions](https://github.com/yt-dlp/yt-dlp#update-channels))
 required: true
-- label: I've checked that all provided URLs are alive and playable in a browser
+- label: I've checked that all provided URLs are playable in a browser with the same IP and same login details
 required: true
-- label: I've checked that none of provided URLs [violate any copyrights](https://github.com/ytdl-org/youtube-dl#can-you-add-support-for-this-anime-video-site-or-site-which-shows-current-movies-for-free) or contain any [DRM](https://en.wikipedia.org/wiki/Digital_rights_management) to the best of my knowledge
+- label: I've checked that none of provided URLs [violate any copyrights](https://github.com/yt-dlp/yt-dlp/blob/master/CONTRIBUTING.md#is-the-website-primarily-used-for-piracy) or contain any [DRM](https://en.wikipedia.org/wiki/Digital_rights_management) to the best of my knowledge
 required: true
-- label: I've searched the [bugtracker](https://github.com/yt-dlp/yt-dlp/issues?q=) for similar issues including closed ones. DO NOT post duplicates
+- label: I've searched [known issues](https://github.com/yt-dlp/yt-dlp/issues/3766) and the [bugtracker](https://github.com/yt-dlp/yt-dlp/issues?q=) for similar issues **including closed ones**. DO NOT post duplicates
 required: true
 - label: I've read the [guidelines for opening an issue](https://github.com/yt-dlp/yt-dlp/blob/master/CONTRIBUTING.md#opening-an-issue)
 required: true
@@ -26,8 +27,8 @@ body:
 id: region
 attributes:
 label: Region
-description: "Enter the region the site is accessible from"
+description: Enter the country/region that the site is accessible from
-placeholder: "India"
+placeholder: India
 - type: textarea
 id: example-urls
 attributes:
@@ -43,32 +44,9 @@ body:
 - type: textarea
 id: description
 attributes:
-label: Description
+label: Provide a description that is worded well enough to be understood
-description: |
+description: See [is-the-description-of-the-issue-itself-sufficient](https://github.com/yt-dlp/yt-dlp/blob/master/CONTRIBUTING.md#is-the-description-of-the-issue-itself-sufficient)
-Provide any additional information
+placeholder: Provide any additional information, any suggested solutions, and as much context and examples as possible
-placeholder: WRITE DESCRIPTION HERE
-validations:
-required: true
-- type: textarea
-id: log
-attributes:
-label: Verbose log
-description: |
-Provide the complete verbose output **using one of the example URLs provided above**.
-Add the `-vU` flag to your command line you run yt-dlp with (`yt-dlp -vU <your command line>`), copy the WHOLE output and insert it below.
-It should look similar to this:
-placeholder: |
-[debug] Command-line config: ['-vU', 'http://www.youtube.com/watch?v=BaW_jenozKc']
-[debug] Portable config file: yt-dlp.conf
-[debug] Portable config: ['-i']
-[debug] Encodings: locale cp1252, fs utf-8, stdout utf-8, stderr utf-8, pref cp1252
-[debug] yt-dlp version %(version)s (exe)
-[debug] Python version 3.8.8 (CPython 64bit) - Windows-10-10.0.19041-SP0
-[debug] exe versions: ffmpeg 3.0.1, ffprobe 3.0.1
-[debug] Optional libraries: Cryptodome, keyring, mutagen, sqlite, websockets
-[debug] Proxy map: {}
-yt-dlp is up to date (%(version)s)
-<more lines>
-render: shell
 validations:
 required: true
+%(verbose)s
@@ -2,6 +2,7 @@ name: Site feature request
 description: Request a new functionality for a supported site
 labels: [triage, site-enhancement]
 body:
+%(no_skip)s
 - type: checkboxes
 id: checklist
 attributes:
@@ -9,13 +10,13 @@ body:
 description: |
 Carefully read and work through this check list in order to prevent the most common mistakes and misuse of yt-dlp:
 options:
-- label: I'm reporting a site feature request
+- label: I'm requesting a site-specific feature
 required: true
-- label: I've verified that I'm running yt-dlp version **%(version)s** ([update instructions](https://github.com/yt-dlp/yt-dlp#update)) or later (specify commit)
+- label: I've verified that I have **updated yt-dlp to nightly or master** ([update instructions](https://github.com/yt-dlp/yt-dlp#update-channels))
 required: true
-- label: I've checked that all provided URLs are alive and playable in a browser
+- label: I've checked that all provided URLs are playable in a browser with the same IP and same login details
 required: true
-- label: I've searched the [bugtracker](https://github.com/yt-dlp/yt-dlp/issues?q=) for similar issues including closed ones. DO NOT post duplicates
+- label: I've searched [known issues](https://github.com/yt-dlp/yt-dlp/issues/3766) and the [bugtracker](https://github.com/yt-dlp/yt-dlp/issues?q=) for similar issues **including closed ones**. DO NOT post duplicates
 required: true
 - label: I've read the [guidelines for opening an issue](https://github.com/yt-dlp/yt-dlp/blob/master/CONTRIBUTING.md#opening-an-issue)
 required: true
@@ -24,8 +25,8 @@ body:
 id: region
 attributes:
 label: Region
-description: "Enter the region the site is accessible from"
+description: Enter the country/region that the site is accessible from
-placeholder: "India"
+placeholder: India
 - type: textarea
 id: example-urls
 attributes:
@@ -39,34 +40,9 @@ body:
 - type: textarea
 id: description
 attributes:
-label: Description
+label: Provide a description that is worded well enough to be understood
-description: |
+description: See [is-the-description-of-the-issue-itself-sufficient](https://github.com/yt-dlp/yt-dlp/blob/master/CONTRIBUTING.md#is-the-description-of-the-issue-itself-sufficient)
-Provide an explanation of your site feature request in an arbitrary form.
+placeholder: Provide any additional information, any suggested solutions, and as much context and examples as possible
-Please make sure the description is worded well enough to be understood, see [is-the-description-of-the-issue-itself-sufficient](https://github.com/ytdl-org/youtube-dl#is-the-description-of-the-issue-itself-sufficient).
-Provide any additional information, any suggested solutions, and as much context and examples as possible
-placeholder: WRITE DESCRIPTION HERE
-validations:
-required: true
-- type: textarea
-id: log
-attributes:
-label: Verbose log
-description: |
-Provide the complete verbose output of yt-dlp that demonstrates the need for the enhancement.
-Add the `-vU` flag to your command line you run yt-dlp with (`yt-dlp -vU <your command line>`), copy the WHOLE output and insert it below.
-It should look similar to this:
-placeholder: |
-[debug] Command-line config: ['-vU', 'http://www.youtube.com/watch?v=BaW_jenozKc']
-[debug] Portable config file: yt-dlp.conf
-[debug] Portable config: ['-i']
-[debug] Encodings: locale cp1252, fs utf-8, stdout utf-8, stderr utf-8, pref cp1252
-[debug] yt-dlp version %(version)s (exe)
-[debug] Python version 3.8.8 (CPython 64bit) - Windows-10-10.0.19041-SP0
-[debug] exe versions: ffmpeg 3.0.1, ffprobe 3.0.1
-[debug] Optional libraries: Cryptodome, keyring, mutagen, sqlite, websockets
-[debug] Proxy map: {}
-yt-dlp is up to date (%(version)s)
-<more lines>
-render: shell
 validations:
 required: true
+%(verbose)s

.github/ISSUE_TEMPLATE_tmpl/4_bug_report.yml
@@ -1,7 +1,8 @@
-name: Bug report
+name: Core bug report
 description: Report a bug unrelated to any particular site or extractor
 labels: [triage, bug]
 body:
+%(no_skip)s
 - type: checkboxes
 id: checklist
 attributes:
@@ -11,47 +12,22 @@ body:
 options:
 - label: I'm reporting a bug unrelated to a specific site
 required: true
-- label: I've verified that I'm running yt-dlp version **%(version)s** ([update instructions](https://github.com/yt-dlp/yt-dlp#update)) or later (specify commit)
+- label: I've verified that I have **updated yt-dlp to nightly or master** ([update instructions](https://github.com/yt-dlp/yt-dlp#update-channels))
 required: true
-- label: I've checked that all provided URLs are alive and playable in a browser
+- label: I've checked that all provided URLs are playable in a browser with the same IP and same login details
 required: true
-- label: I've checked that all URLs and arguments with special characters are [properly quoted or escaped](https://github.com/ytdl-org/youtube-dl#video-url-contains-an-ampersand-and-im-getting-some-strange-output-1-2839-or-v-is-not-recognized-as-an-internal-or-external-command)
+- label: I've checked that all URLs and arguments with special characters are [properly quoted or escaped](https://github.com/yt-dlp/yt-dlp/wiki/FAQ#video-url-contains-an-ampersand--and-im-getting-some-strange-output-1-2839-or-v-is-not-recognized-as-an-internal-or-external-command)
 required: true
-- label: I've searched the [bugtracker](https://github.com/yt-dlp/yt-dlp/issues?q=) for similar issues including closed ones. DO NOT post duplicates
+- label: I've searched [known issues](https://github.com/yt-dlp/yt-dlp/issues/3766) and the [bugtracker](https://github.com/yt-dlp/yt-dlp/issues?q=) for similar issues **including closed ones**. DO NOT post duplicates
 required: true
 - label: I've read the [guidelines for opening an issue](https://github.com/yt-dlp/yt-dlp/blob/master/CONTRIBUTING.md#opening-an-issue)
 required: true
 - type: textarea
 id: description
 attributes:
-label: Description
+label: Provide a description that is worded well enough to be understood
-description: |
+description: See [is-the-description-of-the-issue-itself-sufficient](https://github.com/yt-dlp/yt-dlp/blob/master/CONTRIBUTING.md#is-the-description-of-the-issue-itself-sufficient)
-Provide an explanation of your issue in an arbitrary form.
+placeholder: Provide any additional information, any suggested solutions, and as much context and examples as possible
-Please make sure the description is worded well enough to be understood, see [is-the-description-of-the-issue-itself-sufficient](https://github.com/ytdl-org/youtube-dl#is-the-description-of-the-issue-itself-sufficient).
-Provide any additional information, any suggested solutions, and as much context and examples as possible
-placeholder: WRITE DESCRIPTION HERE
-validations:
-required: true
-- type: textarea
-id: log
-attributes:
-label: Verbose log
-description: |
-Provide the complete verbose output of yt-dlp **that clearly demonstrates the problem**.
-Add the `-vU` flag to **your** command line you run yt-dlp with (`yt-dlp -vU <your command line>`), copy the WHOLE output and insert it below.
-It should look similar to this:
-placeholder: |
-[debug] Command-line config: ['-vU', 'http://www.youtube.com/watch?v=BaW_jenozKc']
-[debug] Portable config file: yt-dlp.conf
-[debug] Portable config: ['-i']
-[debug] Encodings: locale cp1252, fs utf-8, stdout utf-8, stderr utf-8, pref cp1252
-[debug] yt-dlp version %(version)s (exe)
-[debug] Python version 3.8.8 (CPython 64bit) - Windows-10-10.0.19041-SP0
-[debug] exe versions: ffmpeg 3.0.1, ffprobe 3.0.1
-[debug] Optional libraries: Cryptodome, keyring, mutagen, sqlite, websockets
-[debug] Proxy map: {}
-yt-dlp is up to date (%(version)s)
-<more lines>
-render: shell
 validations:
 required: true
+%(verbose)s
@@ -2,6 +2,7 @@ name: Feature request
 description: Request a new functionality unrelated to any particular site or extractor
 labels: [triage, enhancement]
 body:
+%(no_skip)s
 - type: checkboxes
 id: checklist
 attributes:
@@ -9,45 +10,22 @@ body:
 description: |
 Carefully read and work through this check list in order to prevent the most common mistakes and misuse of yt-dlp:
 options:
-- label: I'm reporting a feature request
+- label: I'm requesting a feature unrelated to a specific site
 required: true
 - label: I've looked through the [README](https://github.com/yt-dlp/yt-dlp#readme)
 required: true
-- label: I've verified that I'm running yt-dlp version **%(version)s** ([update instructions](https://github.com/yt-dlp/yt-dlp#update)) or later (specify commit)
+- label: I've verified that I have **updated yt-dlp to nightly or master** ([update instructions](https://github.com/yt-dlp/yt-dlp#update-channels))
 required: true
-- label: I've searched the [bugtracker](https://github.com/yt-dlp/yt-dlp/issues?q=) for similar issues including closed ones. DO NOT post duplicates
+- label: I've searched [known issues](https://github.com/yt-dlp/yt-dlp/issues/3766) and the [bugtracker](https://github.com/yt-dlp/yt-dlp/issues?q=) for similar issues **including closed ones**. DO NOT post duplicates
 required: true
 - label: I've read the [guidelines for opening an issue](https://github.com/yt-dlp/yt-dlp/blob/master/CONTRIBUTING.md#opening-an-issue)
 required: true
 - type: textarea
 id: description
 attributes:
-label: Description
+label: Provide a description that is worded well enough to be understood
-description: |
+description: See [is-the-description-of-the-issue-itself-sufficient](https://github.com/yt-dlp/yt-dlp/blob/master/CONTRIBUTING.md#is-the-description-of-the-issue-itself-sufficient)
-Provide an explanation of your site feature request in an arbitrary form.
+placeholder: Provide any additional information, any suggested solutions, and as much context and examples as possible
-Please make sure the description is worded well enough to be understood, see [is-the-description-of-the-issue-itself-sufficient](https://github.com/ytdl-org/youtube-dl#is-the-description-of-the-issue-itself-sufficient).
-Provide any additional information, any suggested solutions, and as much context and examples as possible
-placeholder: WRITE DESCRIPTION HERE
 validations:
 required: true
-- type: textarea
+%(verbose_optional)s
-id: log
-attributes:
-label: Verbose log
-description: |
-If your feature request involves an existing yt-dlp command, provide the complete verbose output of that command.
-Add the `-vU` flag to **your** command line you run yt-dlp with (`yt-dlp -vU <your command line>`), copy the WHOLE output and insert it below.
-It should look similar to this:
-placeholder: |
-[debug] Command-line config: ['-vU', 'http://www.youtube.com/watch?v=BaW_jenozKc']
-[debug] Portable config file: yt-dlp.conf
-[debug] Portable config: ['-i']
-[debug] Encodings: locale cp1252, fs utf-8, stdout utf-8, stderr utf-8, pref cp1252
-[debug] yt-dlp version 2021.12.01 (exe)
-[debug] Python version 3.8.8 (CPython 64bit) - Windows-10-10.0.19041-SP0
-[debug] exe versions: ffmpeg 3.0.1, ffprobe 3.0.1
-[debug] Optional libraries: Cryptodome, keyring, mutagen, sqlite, websockets
-[debug] Proxy map: {}
-yt-dlp is up to date (2021.12.01)
-<more lines>
-render: shell

.github/ISSUE_TEMPLATE_tmpl/6_question.yml
@@ -2,6 +2,13 @@ name: Ask question
 description: Ask yt-dlp related question
 labels: [question]
 body:
+%(no_skip)s
+- type: markdown
+attributes:
+value: |
+### Make sure you are **only** asking a question and not reporting a bug or requesting a feature.
+If your question contains "isn't working" or "can you add", this is most likely the wrong template.
+If you are in doubt whether this is the right template, **USE ANOTHER TEMPLATE**!
 - type: checkboxes
 id: checklist
 attributes:
@@ -9,45 +16,22 @@ body:
 description: |
 Carefully read and work through this check list in order to prevent the most common mistakes and misuse of yt-dlp:
 options:
-- label: I'm asking a question and **not** reporting a bug/feature request
+- label: I'm asking a question and **not** reporting a bug or requesting a feature
 required: true
 - label: I've looked through the [README](https://github.com/yt-dlp/yt-dlp#readme)
 required: true
-- label: I've read the [guidelines for opening an issue](https://github.com/yt-dlp/yt-dlp/blob/master/CONTRIBUTING.md#opening-an-issue)
+- label: I've verified that I have **updated yt-dlp to nightly or master** ([update instructions](https://github.com/yt-dlp/yt-dlp#update-channels))
 required: true
-- label: I've searched the [bugtracker](https://github.com/yt-dlp/yt-dlp/issues?q=) for similar questions including closed ones
+- label: I've searched [known issues](https://github.com/yt-dlp/yt-dlp/issues/3766) and the [bugtracker](https://github.com/yt-dlp/yt-dlp/issues?q=) for similar questions **including closed ones**. DO NOT post duplicates
+required: true
+- label: I've read the [guidelines for opening an issue](https://github.com/yt-dlp/yt-dlp/blob/master/CONTRIBUTING.md#opening-an-issue)
 required: true
 - type: textarea
 id: question
 attributes:
-label: Question
+label: Please make sure the question is worded well enough to be understood
-description: |
+description: See [is-the-description-of-the-issue-itself-sufficient](https://github.com/yt-dlp/yt-dlp/blob/master/CONTRIBUTING.md#is-the-description-of-the-issue-itself-sufficient)
-Ask your question in an arbitrary form.
+placeholder: Provide any additional information and as much context and examples as possible
-Please make sure it's worded well enough to be understood, see [is-the-description-of-the-issue-itself-sufficient](https://github.com/ytdl-org/youtube-dl#is-the-description-of-the-issue-itself-sufficient).
-Provide any additional information and as much context and examples as possible.
-If your question contains "isn't working" or "can you add", this is most likely the wrong template.
-If you are in doubt if this is the right template, use another template!
-placeholder: WRITE QUESTION HERE
 validations:
 required: true
-- type: textarea
+%(verbose_optional)s
-id: log
-attributes:
-label: Verbose log
-description: |
-If your question involves a yt-dlp command, provide the complete verbose output of that command.
-Add the `-vU` flag to **your** command line you run yt-dlp with (`yt-dlp -vU <your command line>`), copy the WHOLE output and insert it below.
-It should look similar to this:
-placeholder: |
-[debug] Command-line config: ['-vU', 'http://www.youtube.com/watch?v=BaW_jenozKc']
-[debug] Portable config file: yt-dlp.conf
-[debug] Portable config: ['-i']
-[debug] Encodings: locale cp1252, fs utf-8, stdout utf-8, stderr utf-8, pref cp1252
-[debug] yt-dlp version 2021.12.01 (exe)
-[debug] Python version 3.8.8 (CPython 64bit) - Windows-10-10.0.19041-SP0
-[debug] exe versions: ffmpeg 3.0.1, ffprobe 3.0.1
-[debug] Optional libraries: Cryptodome, keyring, mutagen, sqlite, websockets
-[debug] Proxy map: {}
-yt-dlp is up to date (2021.12.01)
-<more lines>
-render: shell
.github/PULL_REQUEST_TEMPLATE.md (vendored, 42 changed lines)
@@ -1,28 +1,42 @@
-## Please follow the guide below
+**IMPORTANT**: PRs without the template will be CLOSED
+
+### Description of your *pull request* and other information
+
+<!--
+
+Explanation of your *pull request* in arbitrary form goes here. Please **make sure the description explains the purpose and effect** of your *pull request* and is worded well enough to be understood. Provide as much **context and examples** as possible
+
+-->
+
+ADD DESCRIPTION HERE
+
+Fixes #
+
+
+<details open><summary>Template</summary> <!-- OPEN is intentional -->
+
+<!--
+
+# PLEASE FOLLOW THE GUIDE BELOW

 - You will be asked some questions, please read them **carefully** and answer honestly
-- Put an `x` into all the boxes [ ] relevant to your *pull request* (like that [x])
+- Put an `x` into all the boxes `[ ]` relevant to your *pull request* (like [x])
 - Use *Preview* tab to see how your *pull request* will actually look like

----
+-->

 ### Before submitting a *pull request* make sure you have:
 - [ ] At least skimmed through [contributing guidelines](https://github.com/yt-dlp/yt-dlp/blob/master/CONTRIBUTING.md#developer-instructions) including [yt-dlp coding conventions](https://github.com/yt-dlp/yt-dlp/blob/master/CONTRIBUTING.md#yt-dlp-coding-conventions)
 - [ ] [Searched](https://github.com/yt-dlp/yt-dlp/search?q=is%3Apr&type=Issues) the bugtracker for similar pull requests
-- [ ] Checked the code with [flake8](https://pypi.python.org/pypi/flake8)

-### In order to be accepted and merged into yt-dlp each piece of code must be in public domain or released under [Unlicense](http://unlicense.org/). Check one of the following options:
+### In order to be accepted and merged into yt-dlp each piece of code must be in public domain or released under [Unlicense](http://unlicense.org/). Check all of the following options that apply:
 - [ ] I am the original author of this code and I am willing to release it under [Unlicense](http://unlicense.org/)
 - [ ] I am not the original author of this code but it is in public domain or released under [Unlicense](http://unlicense.org/) (provide reliable evidence)

 ### What is the purpose of your *pull request*?
-- [ ] Bug fix
-- [ ] Improvement
-- [ ] New extractor
-- [ ] New feature
+- [ ] Fix or improvement to an extractor (Make sure to add/update tests)
+- [ ] New extractor ([Piracy websites will not be accepted](https://github.com/yt-dlp/yt-dlp/blob/master/CONTRIBUTING.md#is-the-website-primarily-used-for-piracy))
+- [ ] Core bug fix/improvement
+- [ ] New feature (It is strongly [recommended to open an issue first](https://github.com/yt-dlp/yt-dlp/blob/master/CONTRIBUTING.md#adding-new-feature-or-making-overarching-changes))

----
+</details>

-### Description of your *pull request* and other information
-
-Explanation of your *pull request* in arbitrary form goes here. Please make sure the description explains the purpose and effect of your *pull request* and is worded well enough to be understood. Provide as much context and examples as possible.
.github/banner.svg (vendored, 10 changed lines)
File diff suppressed because one or more lines are too long (Before: 24 KiB, After: 15 KiB)
.github/workflows/build.yml (vendored, 874 changed lines)
@@ -1,413 +1,515 @@
-name: Build
+name: Build Artifacts
-on: workflow_dispatch
+on:
+  workflow_call:
+    inputs:
+      version:
+        required: true
+        type: string
+      channel:
+        required: false
+        default: stable
+        type: string
+      unix:
+        default: true
+        type: boolean
+      linux_static:
+        default: true
+        type: boolean
+      linux_arm:
+        default: true
+        type: boolean
+      macos:
+        default: true
+        type: boolean
+      macos_legacy:
+        default: true
+        type: boolean
+      windows:
+        default: true
+        type: boolean
+      windows32:
+        default: true
+        type: boolean
+      origin:
+        required: false
+        default: ''
+        type: string
+    secrets:
+      GPG_SIGNING_KEY:
+        required: false
+
+  workflow_dispatch:
+    inputs:
+      version:
+        description: |
+          VERSION: yyyy.mm.dd[.rev] or rev
+        required: true
+        type: string
+      channel:
+        description: |
+          SOURCE of this build's updates: stable/nightly/master/<repo>
+        required: true
+        default: stable
+        type: string
+      unix:
+        description: yt-dlp, yt-dlp.tar.gz
+        default: true
+        type: boolean
+      linux_static:
+        description: yt-dlp_linux
+        default: true
+        type: boolean
+      linux_arm:
+        description: yt-dlp_linux_aarch64, yt-dlp_linux_armv7l
+        default: true
+        type: boolean
+      macos:
+        description: yt-dlp_macos, yt-dlp_macos.zip
+        default: true
+        type: boolean
+      macos_legacy:
+        description: yt-dlp_macos_legacy
+        default: true
+        type: boolean
+      windows:
+        description: yt-dlp.exe, yt-dlp_min.exe, yt-dlp_win.zip
+        default: true
+        type: boolean
+      windows32:
+        description: yt-dlp_x86.exe
+        default: true
+        type: boolean
+      origin:
+        description: Origin
+        required: false
+        default: 'current repo'
+        type: choice
+        options:
+        - 'current repo'
+
+permissions:
+  contents: read
|
|
||||||
jobs:
|
jobs:
|
||||||
build_unix:
|
process:
|
||||||
runs-on: ubuntu-latest
|
runs-on: ubuntu-latest
|
||||||
outputs:
|
outputs:
|
||||||
version_suffix: ${{ steps.version_suffix.outputs.version_suffix }}
|
origin: ${{ steps.process_origin.outputs.origin }}
|
||||||
ytdlp_version: ${{ steps.bump_version.outputs.ytdlp_version }}
|
steps:
|
||||||
upload_url: ${{ steps.create_release.outputs.upload_url }}
|
- name: Process origin
|
||||||
sha256_bin: ${{ steps.sha256_bin.outputs.sha256_bin }}
|
id: process_origin
|
||||||
sha512_bin: ${{ steps.sha512_bin.outputs.sha512_bin }}
|
run: |
|
||||||
sha256_tar: ${{ steps.sha256_tar.outputs.sha256_tar }}
|
echo "origin=${{ inputs.origin == 'current repo' && github.repository || inputs.origin }}" | tee "$GITHUB_OUTPUT"
|
||||||
sha512_tar: ${{ steps.sha512_tar.outputs.sha512_tar }}
|
|
||||||
|
unix:
|
||||||
|
needs: process
|
||||||
|
if: inputs.unix
|
||||||
|
runs-on: ubuntu-latest
|
||||||
|
steps:
|
||||||
|
- uses: actions/checkout@v4
|
||||||
|
with:
|
||||||
|
fetch-depth: 0 # Needed for changelog
|
||||||
|
- uses: actions/setup-python@v5
|
||||||
|
with:
|
||||||
|
python-version: "3.10"
|
||||||
|
- name: Install Requirements
|
||||||
|
run: |
|
||||||
|
sudo apt -y install zip pandoc man sed
|
||||||
|
- name: Prepare
|
||||||
|
run: |
|
||||||
|
python devscripts/update-version.py -c "${{ inputs.channel }}" -r "${{ needs.process.outputs.origin }}" "${{ inputs.version }}"
|
||||||
|
python devscripts/update_changelog.py -vv
|
||||||
|
python devscripts/make_lazy_extractors.py
|
||||||
|
- name: Build Unix platform-independent binary
|
||||||
|
run: |
|
||||||
|
make all tar
|
||||||
|
- name: Verify --update-to
|
||||||
|
if: vars.UPDATE_TO_VERIFICATION
|
||||||
|
run: |
|
||||||
|
chmod +x ./yt-dlp
|
||||||
|
cp ./yt-dlp ./yt-dlp_downgraded
|
||||||
|
version="$(./yt-dlp --version)"
|
||||||
|
./yt-dlp_downgraded -v --update-to yt-dlp/yt-dlp@2023.03.04
|
||||||
|
downgraded_version="$(./yt-dlp_downgraded --version)"
|
||||||
|
[[ "$version" != "$downgraded_version" ]]
|
||||||
|
- name: Upload artifacts
|
||||||
|
uses: actions/upload-artifact@v4
|
||||||
|
with:
|
||||||
|
name: build-bin-${{ github.job }}
|
||||||
|
path: |
|
||||||
|
yt-dlp
|
||||||
|
yt-dlp.tar.gz
|
||||||
|
compression-level: 0
|
||||||
|
|
||||||
|
linux_static:
|
||||||
|
needs: process
|
||||||
|
if: inputs.linux_static
|
||||||
|
runs-on: ubuntu-latest
|
||||||
|
steps:
|
||||||
|
- uses: actions/checkout@v4
|
||||||
|
- name: Build static executable
|
||||||
|
env:
|
||||||
|
channel: ${{ inputs.channel }}
|
||||||
|
origin: ${{ needs.process.outputs.origin }}
|
||||||
|
version: ${{ inputs.version }}
|
||||||
|
run: |
|
||||||
|
mkdir ~/build
|
||||||
|
cd bundle/docker
|
||||||
|
docker compose up --build static
|
||||||
|
sudo chown "${USER}:docker" ~/build/yt-dlp_linux
|
||||||
|
- name: Verify --update-to
|
||||||
|
if: vars.UPDATE_TO_VERIFICATION
|
||||||
|
run: |
|
||||||
|
chmod +x ~/build/yt-dlp_linux
|
||||||
|
cp ~/build/yt-dlp_linux ~/build/yt-dlp_linux_downgraded
|
||||||
|
version="$(~/build/yt-dlp_linux --version)"
|
||||||
|
~/build/yt-dlp_linux_downgraded -v --update-to yt-dlp/yt-dlp@2023.03.04
|
||||||
|
downgraded_version="$(~/build/yt-dlp_linux_downgraded --version)"
|
||||||
|
[[ "$version" != "$downgraded_version" ]]
|
||||||
|
- name: Upload artifacts
|
||||||
|
uses: actions/upload-artifact@v4
|
||||||
|
with:
|
||||||
|
name: build-bin-${{ github.job }}
|
||||||
|
path: |
|
||||||
|
~/build/yt-dlp_linux
|
||||||
|
compression-level: 0
|
||||||
|
|
||||||
|
linux_arm:
|
||||||
|
needs: process
|
||||||
|
if: inputs.linux_arm
|
||||||
|
permissions:
|
||||||
|
contents: read
|
||||||
|
packages: write # for creating cache
|
||||||
|
runs-on: ubuntu-latest
|
||||||
|
strategy:
|
||||||
|
matrix:
|
||||||
|
architecture:
|
||||||
|
- armv7
|
||||||
|
- aarch64
|
||||||
|
|
||||||
steps:
|
steps:
|
||||||
- uses: actions/checkout@v2
|
- uses: actions/checkout@v4
|
||||||
with:
|
with:
|
||||||
fetch-depth: 0
|
path: ./repo
|
||||||
- name: Set up Python
|
- name: Virtualized Install, Prepare & Build
|
||||||
uses: actions/setup-python@v2
|
uses: yt-dlp/run-on-arch-action@v2
|
||||||
with:
|
with:
|
||||||
python-version: '3.8'
|
# Ref: https://github.com/uraimo/run-on-arch-action/issues/55
|
||||||
- name: Install packages
|
env: |
|
||||||
run: sudo apt-get -y install zip pandoc man
|
GITHUB_WORKFLOW: build
|
||||||
- name: Set version suffix
|
githubToken: ${{ github.token }} # To cache image
|
||||||
id: version_suffix
|
arch: ${{ matrix.architecture }}
|
||||||
env:
|
distro: ubuntu18.04 # Standalone executable should be built on minimum supported OS
|
||||||
PUSH_VERSION_COMMIT: ${{ secrets.PUSH_VERSION_COMMIT }}
|
dockerRunArgs: --volume "${PWD}/repo:/repo"
|
||||||
if: "env.PUSH_VERSION_COMMIT == ''"
|
install: | # Installing Python 3.10 from the Deadsnakes repo raises errors
|
||||||
run: echo ::set-output name=version_suffix::$(date -u +"%H%M%S")
|
apt update
|
||||||
- name: Bump version
|
apt -y install zlib1g-dev libffi-dev python3.8 python3.8-dev python3.8-distutils python3-pip
|
||||||
id: bump_version
|
python3.8 -m pip install -U pip setuptools wheel
|
||||||
run: |
|
# Cannot access any files from the repo directory at this stage
|
||||||
python devscripts/update-version.py ${{ steps.version_suffix.outputs.version_suffix }}
|
python3.8 -m pip install -U Pyinstaller mutagen pycryptodomex websockets brotli certifi secretstorage cffi
|
||||||
make issuetemplates
|
|
||||||
- name: Push to release
|
|
||||||
id: push_release
|
|
||||||
run: |
|
|
||||||
git config --global user.name github-actions
|
|
||||||
git config --global user.email github-actions@example.com
|
|
||||||
git add -u
|
|
||||||
git commit -m "[version] update" -m "Created by: ${{ github.event.sender.login }}" -m ":ci skip all"
|
|
||||||
git push origin --force ${{ github.event.ref }}:release
|
|
||||||
echo ::set-output name=head_sha::$(git rev-parse HEAD)
|
|
||||||
- name: Update master
|
|
||||||
id: push_master
|
|
||||||
env:
|
|
||||||
PUSH_VERSION_COMMIT: ${{ secrets.PUSH_VERSION_COMMIT }}
|
|
||||||
if: "env.PUSH_VERSION_COMMIT != ''"
|
|
||||||
run: git push origin ${{ github.event.ref }}
|
|
||||||
- name: Get Changelog
|
|
||||||
id: get_changelog
|
|
||||||
run: |
|
|
||||||
changelog=$(cat Changelog.md | grep -oPz '(?s)(?<=### ${{ steps.bump_version.outputs.ytdlp_version }}\n{2}).+?(?=\n{2,3}###)') || true
|
|
||||||
echo "changelog<<EOF" >> $GITHUB_ENV
|
|
||||||
echo "$changelog" >> $GITHUB_ENV
|
|
||||||
echo "EOF" >> $GITHUB_ENV
|
|
||||||
|
|
||||||
- name: Build lazy extractors
|
run: |
|
||||||
id: lazy_extractors
|
cd repo
|
||||||
run: python devscripts/make_lazy_extractors.py
|
python3.8 devscripts/install_deps.py -o --include build
|
||||||
- name: Run Make
|
python3.8 devscripts/install_deps.py --include pyinstaller --include secretstorage # Cached version may be out of date
|
||||||
run: make all tar
|
python3.8 devscripts/update-version.py -c "${{ inputs.channel }}" -r "${{ needs.process.outputs.origin }}" "${{ inputs.version }}"
|
||||||
- name: Get SHA2-256SUMS for yt-dlp
|
python3.8 devscripts/make_lazy_extractors.py
|
||||||
id: sha256_bin
|
python3.8 -m bundle.pyinstaller
|
||||||
run: echo "::set-output name=sha256_bin::$(sha256sum yt-dlp | awk '{print $1}')"
|
|
||||||
- name: Get SHA2-256SUMS for yt-dlp.tar.gz
|
|
||||||
id: sha256_tar
|
|
||||||
run: echo "::set-output name=sha256_tar::$(sha256sum yt-dlp.tar.gz | awk '{print $1}')"
|
|
||||||
- name: Get SHA2-512SUMS for yt-dlp
|
|
||||||
id: sha512_bin
|
|
||||||
run: echo "::set-output name=sha512_bin::$(sha512sum yt-dlp | awk '{print $1}')"
|
|
||||||
- name: Get SHA2-512SUMS for yt-dlp.tar.gz
|
|
||||||
id: sha512_tar
|
|
||||||
run: echo "::set-output name=sha512_tar::$(sha512sum yt-dlp.tar.gz | awk '{print $1}')"
|
|
||||||
|
|
||||||
- name: Install dependencies for pypi
|
if ${{ vars.UPDATE_TO_VERIFICATION && 'true' || 'false' }}; then
|
||||||
env:
|
arch="${{ (matrix.architecture == 'armv7' && 'armv7l') || matrix.architecture }}"
|
||||||
PYPI_TOKEN: ${{ secrets.PYPI_TOKEN }}
|
chmod +x ./dist/yt-dlp_linux_${arch}
|
||||||
if: "env.PYPI_TOKEN != ''"
|
cp ./dist/yt-dlp_linux_${arch} ./dist/yt-dlp_linux_${arch}_downgraded
|
||||||
run: |
|
version="$(./dist/yt-dlp_linux_${arch} --version)"
|
||||||
python -m pip install --upgrade pip
|
./dist/yt-dlp_linux_${arch}_downgraded -v --update-to yt-dlp/yt-dlp@2023.03.04
|
||||||
pip install setuptools wheel twine
|
downgraded_version="$(./dist/yt-dlp_linux_${arch}_downgraded --version)"
|
||||||
- name: Build and publish on pypi
|
[[ "$version" != "$downgraded_version" ]]
|
||||||
env:
|
fi
|
||||||
TWINE_USERNAME: __token__
|
|
||||||
TWINE_PASSWORD: ${{ secrets.PYPI_TOKEN }}
|
|
||||||
if: "env.TWINE_PASSWORD != ''"
|
|
||||||
run: |
|
|
||||||
rm -rf dist/*
|
|
||||||
python setup.py sdist bdist_wheel
|
|
||||||
twine upload dist/*
|
|
||||||
|
|
||||||
- name: Install SSH private key
|
- name: Upload artifacts
|
||||||
env:
|
uses: actions/upload-artifact@v4
|
||||||
BREW_TOKEN: ${{ secrets.BREW_TOKEN }}
|
with:
|
||||||
if: "env.BREW_TOKEN != ''"
|
name: build-bin-linux_${{ matrix.architecture }}
|
||||||
uses: yt-dlp/ssh-agent@v0.5.3
|
path: | # run-on-arch-action designates armv7l as armv7
|
||||||
with:
|
repo/dist/yt-dlp_linux_${{ (matrix.architecture == 'armv7' && 'armv7l') || matrix.architecture }}
|
||||||
ssh-private-key: ${{ env.BREW_TOKEN }}
|
compression-level: 0
|
||||||
- name: Update Homebrew Formulae
|
|
||||||
env:
|
|
||||||
BREW_TOKEN: ${{ secrets.BREW_TOKEN }}
|
|
||||||
if: "env.BREW_TOKEN != ''"
|
|
||||||
run: |
|
|
||||||
git clone git@github.com:yt-dlp/homebrew-taps taps/
|
|
||||||
python3 devscripts/update-formulae.py taps/Formula/yt-dlp.rb "${{ steps.bump_version.outputs.ytdlp_version }}"
|
|
||||||
git -C taps/ config user.name github-actions
|
|
||||||
git -C taps/ config user.email github-actions@example.com
|
|
||||||
git -C taps/ commit -am 'yt-dlp: ${{ steps.bump_version.outputs.ytdlp_version }}'
|
|
||||||
git -C taps/ push
|
|
||||||
|
|
||||||
- name: Create Release
|
macos:
|
||||||
id: create_release
|
needs: process
|
||||||
uses: actions/create-release@v1
|
if: inputs.macos
|
||||||
env:
|
|
||||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
|
||||||
with:
|
|
||||||
tag_name: ${{ steps.bump_version.outputs.ytdlp_version }}
|
|
||||||
release_name: yt-dlp ${{ steps.bump_version.outputs.ytdlp_version }}
|
|
||||||
commitish: ${{ steps.push_release.outputs.head_sha }}
|
|
||||||
body: |
|
|
||||||
#### [A description of the various files]((https://github.com/yt-dlp/yt-dlp#release-files)) are in the README
|
|
||||||
|
|
||||||
---
|
|
||||||
|
|
||||||
### Changelog:
|
|
||||||
${{ env.changelog }}
|
|
||||||
draft: false
|
|
||||||
prerelease: false
|
|
||||||
- name: Upload yt-dlp Unix binary
|
|
||||||
id: upload-release-asset
|
|
||||||
uses: actions/upload-release-asset@v1
|
|
||||||
env:
|
|
||||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
|
||||||
with:
|
|
||||||
upload_url: ${{ steps.create_release.outputs.upload_url }}
|
|
||||||
asset_path: ./yt-dlp
|
|
||||||
asset_name: yt-dlp
|
|
||||||
asset_content_type: application/octet-stream
|
|
||||||
- name: Upload Source tar
|
|
||||||
uses: actions/upload-release-asset@v1
|
|
||||||
env:
|
|
||||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
|
||||||
with:
|
|
||||||
upload_url: ${{ steps.create_release.outputs.upload_url }}
|
|
||||||
asset_path: ./yt-dlp.tar.gz
|
|
||||||
asset_name: yt-dlp.tar.gz
|
|
||||||
asset_content_type: application/gzip
|
|
||||||
|
|
||||||
build_macos:
|
|
||||||
runs-on: macos-11
|
runs-on: macos-11
|
||||||
needs: build_unix
|
|
||||||
outputs:
|
|
||||||
sha256_macos: ${{ steps.sha256_macos.outputs.sha256_macos }}
|
|
||||||
sha512_macos: ${{ steps.sha512_macos.outputs.sha512_macos }}
|
|
||||||
sha256_macos_zip: ${{ steps.sha256_macos_zip.outputs.sha256_macos_zip }}
|
|
||||||
sha512_macos_zip: ${{ steps.sha512_macos_zip.outputs.sha512_macos_zip }}
|
|
||||||
|
|
||||||
steps:
|
steps:
|
||||||
- uses: actions/checkout@v2
|
- uses: actions/checkout@v4
|
||||||
# In order to create a universal2 application, the version of python3 in /usr/bin has to be used
|
# NB: Building universal2 does not work with python from actions/setup-python
|
||||||
- name: Install Requirements
|
- name: Install Requirements
|
||||||
run: |
|
run: |
|
||||||
brew install coreutils
|
brew install coreutils
|
||||||
/usr/bin/python3 -m pip install -U --user pip Pyinstaller==4.10 -r requirements.txt
|
python3 devscripts/install_deps.py --user -o --include build
|
||||||
- name: Bump version
|
python3 devscripts/install_deps.py --print --include pyinstaller > requirements.txt
|
||||||
id: bump_version
|
# We need to ignore wheels otherwise we break universal2 builds
|
||||||
run: /usr/bin/python3 devscripts/update-version.py
|
python3 -m pip install -U --user --no-binary :all: -r requirements.txt
|
||||||
- name: Build lazy extractors
|
# We need to fuse our own universal2 wheels for curl_cffi
|
||||||
id: lazy_extractors
|
python3 -m pip install -U --user delocate
|
||||||
run: /usr/bin/python3 devscripts/make_lazy_extractors.py
|
mkdir curl_cffi_whls curl_cffi_universal2
|
||||||
- name: Run PyInstaller Script
|
python3 devscripts/install_deps.py --print -o --include curl-cffi > requirements.txt
|
||||||
run: /usr/bin/python3 pyinst.py --target-architecture universal2 --onefile
|
for platform in "macosx_11_0_arm64" "macosx_11_0_x86_64"; do
|
||||||
- name: Upload yt-dlp MacOS binary
|
python3 -m pip download \
|
||||||
id: upload-release-macos
|
--only-binary=:all: \
|
||||||
uses: actions/upload-release-asset@v1
|
--platform "${platform}" \
|
||||||
env:
|
--pre -d curl_cffi_whls \
|
||||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
-r requirements.txt
|
||||||
with:
|
done
|
||||||
upload_url: ${{ needs.build_unix.outputs.upload_url }}
|
python3 -m delocate.cmd.delocate_fuse curl_cffi_whls/curl_cffi*.whl -w curl_cffi_universal2
|
||||||
asset_path: ./dist/yt-dlp_macos
|
python3 -m delocate.cmd.delocate_fuse curl_cffi_whls/cffi*.whl -w curl_cffi_universal2
|
||||||
asset_name: yt-dlp_macos
|
cd curl_cffi_universal2
|
||||||
asset_content_type: application/octet-stream
|
for wheel in *cffi*.whl; do mv -n -- "${wheel}" "${wheel/x86_64/universal2}"; done
|
||||||
- name: Get SHA2-256SUMS for yt-dlp_macos
|
python3 -m pip install -U --user *cffi*.whl
|
||||||
id: sha256_macos
|
|
||||||
run: echo "::set-output name=sha256_macos::$(sha256sum dist/yt-dlp_macos | awk '{print $1}')"
|
|
||||||
- name: Get SHA2-512SUMS for yt-dlp_macos
|
|
||||||
id: sha512_macos
|
|
||||||
run: echo "::set-output name=sha512_macos::$(sha512sum dist/yt-dlp_macos | awk '{print $1}')"
|
|
||||||
|
|
||||||
- name: Run PyInstaller Script with --onedir
|
- name: Prepare
|
||||||
run: |
|
run: |
|
||||||
/usr/bin/python3 pyinst.py --target-architecture universal2 --onedir
|
python3 devscripts/update-version.py -c "${{ inputs.channel }}" -r "${{ needs.process.outputs.origin }}" "${{ inputs.version }}"
|
||||||
zip ./dist/yt-dlp_macos.zip ./dist/yt-dlp_macos
|
python3 devscripts/make_lazy_extractors.py
|
||||||
- name: Upload yt-dlp MacOS onedir
|
- name: Build
|
||||||
id: upload-release-macos-zip
|
run: |
|
||||||
uses: actions/upload-release-asset@v1
|
python3 -m bundle.pyinstaller --target-architecture universal2 --onedir
|
||||||
env:
|
(cd ./dist/yt-dlp_macos && zip -r ../yt-dlp_macos.zip .)
|
||||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
python3 -m bundle.pyinstaller --target-architecture universal2
|
||||||
with:
|
|
||||||
upload_url: ${{ needs.build_unix.outputs.upload_url }}
|
|
||||||
asset_path: ./dist/yt-dlp_macos.zip
|
|
||||||
asset_name: yt-dlp_macos.zip
|
|
||||||
asset_content_type: application/zip
|
|
||||||
- name: Get SHA2-256SUMS for yt-dlp_macos.zip
|
|
||||||
id: sha256_macos_zip
|
|
||||||
run: echo "::set-output name=sha256_macos_zip::$(sha256sum dist/yt-dlp_macos.zip | awk '{print $1}')"
|
|
||||||
- name: Get SHA2-512SUMS for yt-dlp_macos.zip
|
|
||||||
id: sha512_macos_zip
|
|
||||||
run: echo "::set-output name=sha512_macos_zip::$(sha512sum dist/yt-dlp_macos.zip | awk '{print $1}')"
|
|
||||||
|
|
||||||
build_windows:
|
- name: Verify --update-to
|
||||||
runs-on: windows-latest
|
if: vars.UPDATE_TO_VERIFICATION
|
||||||
needs: build_unix
|
run: |
|
||||||
outputs:
|
chmod +x ./dist/yt-dlp_macos
|
||||||
sha256_win: ${{ steps.sha256_win.outputs.sha256_win }}
|
cp ./dist/yt-dlp_macos ./dist/yt-dlp_macos_downgraded
|
||||||
sha512_win: ${{ steps.sha512_win.outputs.sha512_win }}
|
version="$(./dist/yt-dlp_macos --version)"
|
||||||
sha256_py2exe: ${{ steps.sha256_py2exe.outputs.sha256_py2exe }}
|
./dist/yt-dlp_macos_downgraded -v --update-to yt-dlp/yt-dlp@2023.03.04
|
||||||
sha512_py2exe: ${{ steps.sha512_py2exe.outputs.sha512_py2exe }}
|
downgraded_version="$(./dist/yt-dlp_macos_downgraded --version)"
|
||||||
sha256_win_zip: ${{ steps.sha256_win_zip.outputs.sha256_win_zip }}
|
[[ "$version" != "$downgraded_version" ]]
|
||||||
sha512_win_zip: ${{ steps.sha512_win_zip.outputs.sha512_win_zip }}
|
|
||||||
|
- name: Upload artifacts
|
||||||
|
uses: actions/upload-artifact@v4
|
||||||
|
with:
|
||||||
|
name: build-bin-${{ github.job }}
|
||||||
|
path: |
|
||||||
|
dist/yt-dlp_macos
|
||||||
|
dist/yt-dlp_macos.zip
|
||||||
|
compression-level: 0
|
||||||
|
|
||||||
|
macos_legacy:
|
||||||
|
needs: process
|
||||||
|
if: inputs.macos_legacy
|
||||||
|
runs-on: macos-12
|
||||||
|
|
||||||
steps:
|
steps:
|
||||||
- uses: actions/checkout@v2
|
- uses: actions/checkout@v4
|
||||||
# 3.8 is used for Win7 support
|
- name: Install Python
|
||||||
- name: Set up Python 3.8
|
# We need the official Python, because the GA ones only support newer macOS versions
|
||||||
uses: actions/setup-python@v2
|
env:
|
||||||
with:
|
PYTHON_VERSION: 3.10.5
|
||||||
python-version: '3.8'
|
MACOSX_DEPLOYMENT_TARGET: 10.9 # Used up by the Python build tools
|
||||||
- name: Install Requirements
|
run: |
|
||||||
# Custom pyinstaller built with https://github.com/yt-dlp/pyinstaller-builds
|
# Hack to get the latest patch version. Uncomment if needed
|
||||||
run: |
|
#brew install python@3.10
|
||||||
python -m pip install --upgrade pip setuptools wheel py2exe
|
#export PYTHON_VERSION=$( $(brew --prefix)/opt/python@3.10/bin/python3 --version | cut -d ' ' -f 2 )
|
||||||
pip install "https://yt-dlp.github.io/Pyinstaller-Builds/x86_64/pyinstaller-4.10-py3-none-any.whl" -r requirements.txt
|
curl https://www.python.org/ftp/python/${PYTHON_VERSION}/python-${PYTHON_VERSION}-macos11.pkg -o "python.pkg"
|
||||||
- name: Bump version
|
sudo installer -pkg python.pkg -target /
|
||||||
id: bump_version
|
python3 --version
|
||||||
env:
|
- name: Install Requirements
|
||||||
version_suffix: ${{ needs.build_unix.outputs.version_suffix }}
|
run: |
|
||||||
run: python devscripts/update-version.py ${{ env.version_suffix }}
|
brew install coreutils
|
||||||
- name: Build lazy extractors
|
python3 devscripts/install_deps.py --user -o --include build
|
||||||
id: lazy_extractors
|
python3 devscripts/install_deps.py --user --include pyinstaller
|
||||||
run: python devscripts/make_lazy_extractors.py
|
|
||||||
- name: Run PyInstaller Script
|
|
||||||
run: python pyinst.py
|
|
||||||
- name: Upload yt-dlp.exe Windows binary
|
|
||||||
id: upload-release-windows
|
|
||||||
uses: actions/upload-release-asset@v1
|
|
||||||
env:
|
|
||||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
|
||||||
with:
|
|
||||||
upload_url: ${{ needs.build_unix.outputs.upload_url }}
|
|
||||||
asset_path: ./dist/yt-dlp.exe
|
|
||||||
asset_name: yt-dlp.exe
|
|
||||||
asset_content_type: application/vnd.microsoft.portable-executable
|
|
||||||
- name: Get SHA2-256SUMS for yt-dlp.exe
|
|
||||||
id: sha256_win
|
|
||||||
run: echo "::set-output name=sha256_win::$((Get-FileHash dist\yt-dlp.exe -Algorithm SHA256).Hash.ToLower())"
|
|
||||||
- name: Get SHA2-512SUMS for yt-dlp.exe
|
|
||||||
id: sha512_win
|
|
||||||
run: echo "::set-output name=sha512_win::$((Get-FileHash dist\yt-dlp.exe -Algorithm SHA512).Hash.ToLower())"
|
|
||||||
|
|
||||||
- name: Run PyInstaller Script with --onedir
|
- name: Prepare
|
||||||
run: |
|
run: |
|
||||||
python pyinst.py --onedir
|
python3 devscripts/update-version.py -c "${{ inputs.channel }}" -r "${{ needs.process.outputs.origin }}" "${{ inputs.version }}"
|
||||||
Compress-Archive -LiteralPath ./dist/yt-dlp -DestinationPath ./dist/yt-dlp_win.zip
|
python3 devscripts/make_lazy_extractors.py
|
||||||
- name: Upload yt-dlp Windows onedir
|
- name: Build
|
||||||
id: upload-release-windows-zip
|
run: |
|
||||||
uses: actions/upload-release-asset@v1
|
python3 -m bundle.pyinstaller
|
||||||
env:
|
mv dist/yt-dlp_macos dist/yt-dlp_macos_legacy
|
||||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
|
||||||
with:
|
|
||||||
upload_url: ${{ needs.build_unix.outputs.upload_url }}
|
|
||||||
asset_path: ./dist/yt-dlp_win.zip
|
|
||||||
asset_name: yt-dlp_win.zip
|
|
||||||
asset_content_type: application/zip
|
|
||||||
- name: Get SHA2-256SUMS for yt-dlp_win.zip
|
|
||||||
id: sha256_win_zip
|
|
||||||
run: echo "::set-output name=sha256_win_zip::$((Get-FileHash dist\yt-dlp_win.zip -Algorithm SHA256).Hash.ToLower())"
|
|
||||||
- name: Get SHA2-512SUMS for yt-dlp_win.zip
|
|
||||||
id: sha512_win_zip
|
|
||||||
run: echo "::set-output name=sha512_win_zip::$((Get-FileHash dist\yt-dlp_win.zip -Algorithm SHA512).Hash.ToLower())"
|
|
||||||
|
|
||||||
- name: Run py2exe Script
|
- name: Verify --update-to
|
||||||
run: python setup.py py2exe
|
if: vars.UPDATE_TO_VERIFICATION
|
||||||
- name: Upload yt-dlp_min.exe Windows binary
|
run: |
|
||||||
id: upload-release-windows-py2exe
|
chmod +x ./dist/yt-dlp_macos_legacy
|
||||||
uses: actions/upload-release-asset@v1
|
cp ./dist/yt-dlp_macos_legacy ./dist/yt-dlp_macos_legacy_downgraded
|
||||||
env:
|
version="$(./dist/yt-dlp_macos_legacy --version)"
|
||||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
./dist/yt-dlp_macos_legacy_downgraded -v --update-to yt-dlp/yt-dlp@2023.03.04
|
||||||
with:
|
downgraded_version="$(./dist/yt-dlp_macos_legacy_downgraded --version)"
|
||||||
upload_url: ${{ needs.build_unix.outputs.upload_url }}
|
[[ "$version" != "$downgraded_version" ]]
|
||||||
asset_path: ./dist/yt-dlp.exe
|
|
||||||
asset_name: yt-dlp_min.exe
|
|
||||||
asset_content_type: application/vnd.microsoft.portable-executable
|
|
||||||
- name: Get SHA2-256SUMS for yt-dlp_min.exe
|
|
||||||
id: sha256_py2exe
|
|
||||||
run: echo "::set-output name=sha256_py2exe::$((Get-FileHash dist\yt-dlp.exe -Algorithm SHA256).Hash.ToLower())"
|
|
||||||
- name: Get SHA2-512SUMS for yt-dlp_min.exe
|
|
||||||
id: sha512_py2exe
|
|
||||||
run: echo "::set-output name=sha512_py2exe::$((Get-FileHash dist\yt-dlp.exe -Algorithm SHA512).Hash.ToLower())"
|
|
||||||
|
|
||||||
build_windows32:
|
- name: Upload artifacts
|
||||||
|
uses: actions/upload-artifact@v4
|
||||||
|
with:
|
||||||
|
name: build-bin-${{ github.job }}
|
||||||
|
path: |
|
||||||
|
dist/yt-dlp_macos_legacy
|
||||||
|
compression-level: 0
|
||||||
|
|
||||||
|
windows:
|
||||||
|
needs: process
|
||||||
|
if: inputs.windows
|
||||||
runs-on: windows-latest
|
runs-on: windows-latest
|
||||||
needs: build_unix
|
|
||||||
|
|
||||||
outputs:
|
|
||||||
sha256_win32: ${{ steps.sha256_win32.outputs.sha256_win32 }}
|
|
||||||
sha512_win32: ${{ steps.sha512_win32.outputs.sha512_win32 }}
|
|
||||||
|
|
||||||
steps:
|
steps:
|
||||||
- uses: actions/checkout@v2
|
- uses: actions/checkout@v4
|
||||||
# 3.7 is used for Vista support. See https://github.com/yt-dlp/yt-dlp/issues/390
|
- uses: actions/setup-python@v5
|
||||||
- name: Set up Python 3.7 32-Bit
|
with: # 3.8 is used for Win7 support
|
||||||
uses: actions/setup-python@v2
|
python-version: "3.8"
|
||||||
with:
|
- name: Install Requirements
|
||||||
python-version: '3.7'
|
run: | # Custom pyinstaller built with https://github.com/yt-dlp/pyinstaller-builds
|
||||||
architecture: 'x86'
|
python devscripts/install_deps.py -o --include build
|
||||||
- name: Install Requirements
|
python devscripts/install_deps.py --include curl-cffi
|
||||||
run: |
|
python -m pip install -U "https://yt-dlp.github.io/Pyinstaller-Builds/x86_64/pyinstaller-5.8.0-py3-none-any.whl"
|
||||||
python -m pip install --upgrade pip setuptools wheel
|
|
||||||
pip install "https://yt-dlp.github.io/Pyinstaller-Builds/i686/pyinstaller-4.10-py3-none-any.whl" -r requirements.txt
|
|
||||||
- name: Bump version
|
|
||||||
id: bump_version
|
|
||||||
env:
|
|
||||||
version_suffix: ${{ needs.build_unix.outputs.version_suffix }}
|
|
||||||
run: python devscripts/update-version.py ${{ env.version_suffix }}
|
|
||||||
- name: Build lazy extractors
|
|
||||||
id: lazy_extractors
|
|
||||||
run: python devscripts/make_lazy_extractors.py
|
|
||||||
- name: Run PyInstaller Script for 32 Bit
|
|
||||||
run: python pyinst.py
|
|
||||||
- name: Upload Executable yt-dlp_x86.exe
|
|
||||||
id: upload-release-windows32
|
|
||||||
uses: actions/upload-release-asset@v1
|
|
||||||
env:
|
|
||||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
|
||||||
with:
|
|
||||||
upload_url: ${{ needs.build_unix.outputs.upload_url }}
|
|
||||||
asset_path: ./dist/yt-dlp_x86.exe
|
|
||||||
asset_name: yt-dlp_x86.exe
|
|
||||||
asset_content_type: application/vnd.microsoft.portable-executable
|
|
||||||
- name: Get SHA2-256SUMS for yt-dlp_x86.exe
|
|
||||||
id: sha256_win32
|
|
||||||
run: echo "::set-output name=sha256_win32::$((Get-FileHash dist\yt-dlp_x86.exe -Algorithm SHA256).Hash.ToLower())"
|
|
||||||
- name: Get SHA2-512SUMS for yt-dlp_x86.exe
|
|
||||||
id: sha512_win32
|
|
||||||
run: echo "::set-output name=sha512_win32::$((Get-FileHash dist\yt-dlp_x86.exe -Algorithm SHA512).Hash.ToLower())"
|
|
||||||
|
|
||||||
finish:
|
- name: Prepare
|
||||||
|
run: |
|
||||||
|
python devscripts/update-version.py -c "${{ inputs.channel }}" -r "${{ needs.process.outputs.origin }}" "${{ inputs.version }}"
|
||||||
|
python devscripts/make_lazy_extractors.py
|
||||||
|
- name: Build
|
||||||
|
run: |
|
||||||
|
python -m bundle.pyinstaller
|
||||||
|
python -m bundle.pyinstaller --onedir
|
||||||
|
Move-Item ./dist/yt-dlp.exe ./dist/yt-dlp_real.exe
|
||||||
|
Compress-Archive -Path ./dist/yt-dlp/* -DestinationPath ./dist/yt-dlp_win.zip
|
||||||
|
|
||||||
|
- name: Install Requirements (py2exe)
|
||||||
|
run: |
|
||||||
|
python devscripts/install_deps.py --include py2exe
|
||||||
|
- name: Build (py2exe)
|
||||||
|
run: |
|
||||||
|
python -m bundle.py2exe
|
||||||
|
Move-Item ./dist/yt-dlp.exe ./dist/yt-dlp_min.exe
|
||||||
|
Move-Item ./dist/yt-dlp_real.exe ./dist/yt-dlp.exe
|
||||||
|
|
||||||
|
- name: Verify --update-to
|
||||||
|
if: vars.UPDATE_TO_VERIFICATION
|
||||||
|
run: |
|
||||||
|
foreach ($name in @("yt-dlp","yt-dlp_min")) {
|
||||||
|
Copy-Item "./dist/${name}.exe" "./dist/${name}_downgraded.exe"
|
||||||
|
$version = & "./dist/${name}.exe" --version
|
||||||
|
& "./dist/${name}_downgraded.exe" -v --update-to yt-dlp/yt-dlp@2023.03.04
|
||||||
|
$downgraded_version = & "./dist/${name}_downgraded.exe" --version
|
||||||
|
if ($version -eq $downgraded_version) {
|
||||||
|
exit 1
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
- name: Upload artifacts
|
||||||
|
uses: actions/upload-artifact@v4
|
||||||
|
with:
|
||||||
|
name: build-bin-${{ github.job }}
|
||||||
|
path: |
|
||||||
|
dist/yt-dlp.exe
|
||||||
|
dist/yt-dlp_min.exe
|
||||||
|
dist/yt-dlp_win.zip
|
||||||
|
compression-level: 0
|
||||||
|
|
||||||
|
windows32:
|
||||||
|
needs: process
|
||||||
|
if: inputs.windows32
|
||||||
|
runs-on: windows-latest
|
||||||
|
|
||||||
|
steps:
|
||||||
|
- uses: actions/checkout@v4
|
||||||
|
- uses: actions/setup-python@v5
|
||||||
|
with:
|
||||||
|
python-version: "3.8"
|
||||||
|
architecture: "x86"
|
||||||
|
- name: Install Requirements
|
||||||
|
run: |
|
||||||
|
python devscripts/install_deps.py -o --include build
|
||||||
|
python devscripts/install_deps.py
|
||||||
|
python -m pip install -U "https://yt-dlp.github.io/Pyinstaller-Builds/i686/pyinstaller-5.8.0-py3-none-any.whl"
|
||||||
|
|
||||||
|
- name: Prepare
|
||||||
|
run: |
|
||||||
|
python devscripts/update-version.py -c "${{ inputs.channel }}" -r "${{ needs.process.outputs.origin }}" "${{ inputs.version }}"
|
||||||
|
python devscripts/make_lazy_extractors.py
|
||||||
|
- name: Build
|
||||||
|
run: |
|
||||||
|
python -m bundle.pyinstaller
|
||||||
|
|
||||||
|
- name: Verify --update-to
|
||||||
|
if: vars.UPDATE_TO_VERIFICATION
|
||||||
|
run: |
|
||||||
|
foreach ($name in @("yt-dlp_x86")) {
|
||||||
|
Copy-Item "./dist/${name}.exe" "./dist/${name}_downgraded.exe"
|
||||||
|
$version = & "./dist/${name}.exe" --version
|
||||||
|
& "./dist/${name}_downgraded.exe" -v --update-to yt-dlp/yt-dlp@2023.03.04
|
||||||
|
$downgraded_version = & "./dist/${name}_downgraded.exe" --version
|
||||||
|
if ($version -eq $downgraded_version) {
|
||||||
|
exit 1
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
- name: Upload artifacts
|
||||||
|
uses: actions/upload-artifact@v4
|
||||||
|
with:
|
||||||
|
name: build-bin-${{ github.job }}
|
||||||
|
path: |
|
||||||
|
dist/yt-dlp_x86.exe
|
||||||
|
compression-level: 0
|
||||||
|
|
||||||
|
meta_files:
|
||||||
|
if: always() && !cancelled()
|
||||||
|
needs:
|
||||||
|
- process
|
||||||
|
- unix
|
||||||
|
- linux_static
|
||||||
|
- linux_arm
|
||||||
|
- macos
|
||||||
|
- macos_legacy
|
||||||
|
- windows
|
||||||
|
- windows32
|
||||||
runs-on: ubuntu-latest
|
runs-on: ubuntu-latest
|
||||||
needs: [build_unix, build_windows, build_windows32, build_macos]
|
|
||||||
|
|
||||||
steps:
|
steps:
|
||||||
- name: Make SHA2-256SUMS file
|
- uses: actions/download-artifact@v4
|
||||||
env:
|
with:
|
||||||
SHA256_BIN: ${{ needs.build_unix.outputs.sha256_bin }}
|
path: artifact
|
||||||
SHA256_TAR: ${{ needs.build_unix.outputs.sha256_tar }}
|
pattern: build-bin-*
|
||||||
SHA256_WIN: ${{ needs.build_windows.outputs.sha256_win }}
|
merge-multiple: true
|
||||||
SHA256_PY2EXE: ${{ needs.build_windows.outputs.sha256_py2exe }}
|
|
||||||
SHA256_WIN_ZIP: ${{ needs.build_windows.outputs.sha256_win_zip }}
|
- name: Make SHA2-SUMS files
|
||||||
SHA256_WIN32: ${{ needs.build_windows32.outputs.sha256_win32 }}
|
run: |
|
||||||
SHA256_MACOS: ${{ needs.build_macos.outputs.sha256_macos }}
|
cd ./artifact/
|
||||||
SHA256_MACOS_ZIP: ${{ needs.build_macos.outputs.sha256_macos_zip }}
|
# make sure SHA sums are also printed to stdout
|
||||||
run: |
|
sha256sum * | tee ../SHA2-256SUMS
|
||||||
echo "${{ env.SHA256_BIN }} yt-dlp" >> SHA2-256SUMS
|
sha512sum * | tee ../SHA2-512SUMS
|
||||||
echo "${{ env.SHA256_TAR }} yt-dlp.tar.gz" >> SHA2-256SUMS
|
|
||||||
echo "${{ env.SHA256_WIN }} yt-dlp.exe" >> SHA2-256SUMS
|
- name: Make Update spec
|
||||||
echo "${{ env.SHA256_PY2EXE }} yt-dlp_min.exe" >> SHA2-256SUMS
|
run: |
|
||||||
echo "${{ env.SHA256_WIN32 }} yt-dlp_x86.exe" >> SHA2-256SUMS
|
cat >> _update_spec << EOF
|
||||||
echo "${{ env.SHA256_WIN_ZIP }} yt-dlp_win.zip" >> SHA2-256SUMS
|
# This file is used for regulating self-update
|
||||||
echo "${{ env.SHA256_MACOS }} yt-dlp_macos" >> SHA2-256SUMS
|
lock 2022.08.18.36 .+ Python 3\.6
|
||||||
echo "${{ env.SHA256_MACOS_ZIP }} yt-dlp_macos.zip" >> SHA2-256SUMS
|
lock 2023.11.16 (?!win_x86_exe).+ Python 3\.7
|
||||||
- name: Upload 256SUMS file
|
lock 2023.11.16 win_x86_exe .+ Windows-(?:Vista|2008Server)
|
||||||
id: upload-sums
|
lockV2 yt-dlp/yt-dlp 2022.08.18.36 .+ Python 3\.6
|
||||||
uses: actions/upload-release-asset@v1
|
lockV2 yt-dlp/yt-dlp 2023.11.16 (?!win_x86_exe).+ Python 3\.7
|
||||||
env:
|
lockV2 yt-dlp/yt-dlp 2023.11.16 win_x86_exe .+ Windows-(?:Vista|2008Server)
|
||||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
lockV2 yt-dlp/yt-dlp-nightly-builds 2023.11.15.232826 (?!win_x86_exe).+ Python 3\.7
|
||||||
with:
|
lockV2 yt-dlp/yt-dlp-nightly-builds 2023.11.15.232826 win_x86_exe .+ Windows-(?:Vista|2008Server)
|
||||||
upload_url: ${{ needs.build_unix.outputs.upload_url }}
|
lockV2 yt-dlp/yt-dlp-master-builds 2023.11.15.232812 (?!win_x86_exe).+ Python 3\.7
|
||||||
asset_path: ./SHA2-256SUMS
|
lockV2 yt-dlp/yt-dlp-master-builds 2023.11.15.232812 win_x86_exe .+ Windows-(?:Vista|2008Server)
|
||||||
asset_name: SHA2-256SUMS
|
EOF
|
||||||
asset_content_type: text/plain
|
|
||||||
- name: Make SHA2-512SUMS file
|
- name: Sign checksum files
|
||||||
env:
|
env:
|
||||||
SHA512_BIN: ${{ needs.build_unix.outputs.sha512_bin }}
|
GPG_SIGNING_KEY: ${{ secrets.GPG_SIGNING_KEY }}
|
||||||
SHA512_TAR: ${{ needs.build_unix.outputs.sha512_tar }}
|
if: env.GPG_SIGNING_KEY != ''
|
||||||
SHA512_WIN: ${{ needs.build_windows.outputs.sha512_win }}
|
run: |
|
||||||
SHA512_PY2EXE: ${{ needs.build_windows.outputs.sha512_py2exe }}
|
gpg --batch --import <<< "${{ secrets.GPG_SIGNING_KEY }}"
|
||||||
SHA512_WIN_ZIP: ${{ needs.build_windows.outputs.sha512_win_zip }}
|
for signfile in ./SHA*SUMS; do
|
||||||
SHA512_WIN32: ${{ needs.build_windows32.outputs.sha512_win32 }}
|
gpg --batch --detach-sign "$signfile"
|
||||||
SHA512_MACOS: ${{ needs.build_macos.outputs.sha512_macos }}
|
done
|
||||||
SHA512_MACOS_ZIP: ${{ needs.build_macos.outputs.sha512_macos_zip }}
|
|
||||||
run: |
|
- name: Upload artifacts
|
||||||
echo "${{ env.SHA512_BIN }} yt-dlp" >> SHA2-512SUMS
|
uses: actions/upload-artifact@v4
|
||||||
echo "${{ env.SHA512_TAR }} yt-dlp.tar.gz" >> SHA2-512SUMS
|
with:
|
||||||
echo "${{ env.SHA512_WIN }} yt-dlp.exe" >> SHA2-512SUMS
|
name: build-${{ github.job }}
|
||||||
echo "${{ env.SHA512_WIN_ZIP }} yt-dlp_win.zip" >> SHA2-512SUMS
|
path: |
|
||||||
echo "${{ env.SHA512_PY2EXE }} yt-dlp_min.exe" >> SHA2-512SUMS
|
_update_spec
|
||||||
echo "${{ env.SHA512_WIN32 }} yt-dlp_x86.exe" >> SHA2-512SUMS
|
SHA*SUMS*
|
||||||
echo "${{ env.SHA512_MACOS }} yt-dlp_macos" >> SHA2-512SUMS
|
compression-level: 0
|
||||||
echo "${{ env.SHA512_MACOS_ZIP }} yt-dlp_macos.zip" >> SHA2-512SUMS
|
overwrite: true
|
||||||
- name: Upload 512SUMS file
|
|
||||||
id: upload-512sums
|
|
||||||
uses: actions/upload-release-asset@v1
|
|
||||||
env:
|
|
||||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
|
||||||
with:
|
|
||||||
upload_url: ${{ needs.build_unix.outputs.upload_url }}
|
|
||||||
asset_path: ./SHA2-512SUMS
|
|
||||||
asset_name: SHA2-512SUMS
|
|
||||||
asset_content_type: text/plain
|
|
||||||
|
|||||||
.github/workflows/codeql.yml (vendored, new file, 65 lines)
@@ -0,0 +1,65 @@
|||||||
|
name: "CodeQL"

on:
  push:
    branches: [ 'master', 'gh-pages', 'release' ]
  pull_request:
    # The branches below must be a subset of the branches above
    branches: [ 'master' ]
  schedule:
    - cron: '59 11 * * 5'

jobs:
  analyze:
    name: Analyze
    runs-on: ubuntu-latest
    permissions:
      actions: read
      contents: read
      security-events: write

    strategy:
      fail-fast: false
      matrix:
        language: [ 'python' ]
        # CodeQL supports [ 'cpp', 'csharp', 'go', 'java', 'javascript', 'python', 'ruby' ]
        # Use only 'java' to analyze code written in Java, Kotlin or both
        # Use only 'javascript' to analyze code written in JavaScript, TypeScript or both
        # Learn more about CodeQL language support at https://aka.ms/codeql-docs/language-support

    steps:
    - name: Checkout repository
      uses: actions/checkout@v4

    # Initializes the CodeQL tools for scanning.
    - name: Initialize CodeQL
      uses: github/codeql-action/init@v2
      with:
        languages: ${{ matrix.language }}
        # If you wish to specify custom queries, you can do so here or in a config file.
        # By default, queries listed here will override any specified in a config file.
        # Prefix the list here with "+" to use these queries and those in the config file.

        # For more details on CodeQL's query packs, refer to: https://docs.github.com/en/code-security/code-scanning/automatically-scanning-your-code-for-vulnerabilities-and-errors/configuring-code-scanning#using-queries-in-ql-packs
        # queries: security-extended,security-and-quality


    # Autobuild attempts to build any compiled languages (C/C++, C#, Go, Java, or Swift).
    # If this step fails, then you should remove it and run the build manually (see below)
    - name: Autobuild
      uses: github/codeql-action/autobuild@v2

    # ℹ️ Command-line programs to run using the OS shell.
    # 📚 See https://docs.github.com/en/actions/using-workflows/workflow-syntax-for-github-actions#jobsjob_idstepsrun

    # If the Autobuild fails above, remove it and uncomment the following three lines.
    # modify them (or add more) to build your code if your project, please refer to the EXAMPLE below for guidance.

    # - run: |
    #     echo "Run, Build Application using script"
    #     ./location_of_script_within_repo/buildscript.sh

    - name: Perform CodeQL Analysis
      uses: github/codeql-action/analyze@v2
      with:
        category: "/language:${{matrix.language}}"
.github/workflows/core.yml (vendored, 58 changed lines)
@@ -1,5 +1,32 @@
 name: Core Tests
-on: [push, pull_request]
+on:
+  push:
+    paths:
+      - .github/**
+      - devscripts/**
+      - test/**
+      - yt_dlp/**.py
+      - '!yt_dlp/extractor/*.py'
+      - yt_dlp/extractor/__init__.py
+      - yt_dlp/extractor/common.py
+      - yt_dlp/extractor/extractors.py
+  pull_request:
+    paths:
+      - .github/**
+      - devscripts/**
+      - test/**
+      - yt_dlp/**.py
+      - '!yt_dlp/extractor/*.py'
+      - yt_dlp/extractor/__init__.py
+      - yt_dlp/extractor/common.py
+      - yt_dlp/extractor/extractors.py
+permissions:
+  contents: read
+
+concurrency:
+  group: core-${{ github.event.pull_request.number || github.ref }}
+  cancel-in-progress: ${{ github.event_name == 'pull_request' }}
+
 jobs:
   tests:
     name: Core Tests
@@ -8,24 +35,27 @@ jobs:
     strategy:
       fail-fast: false
       matrix:
-        os: [ubuntu-18.04]
-        # py3.9 is in quick-test
-        python-version: [3.7, 3.8, 3.10-dev, pypy-3.6, pypy-3.7]
-        run-tests-ext: [sh]
+        os: [ubuntu-latest]
+        # CPython 3.8 is in quick-test
+        python-version: ['3.9', '3.10', '3.11', '3.12', pypy-3.8, pypy-3.10]
         include:
-        # atleast one of the tests must be in windows
+        # atleast one of each CPython/PyPy tests must be in windows
         - os: windows-latest
-          python-version: 3.6
-          run-tests-ext: bat
+          python-version: '3.8'
+        - os: windows-latest
+          python-version: '3.12'
+        - os: windows-latest
+          python-version: pypy-3.9
     steps:
-      - uses: actions/checkout@v2
+      - uses: actions/checkout@v4
       - name: Set up Python ${{ matrix.python-version }}
-        uses: actions/setup-python@v2
+        uses: actions/setup-python@v5
        with:
          python-version: ${{ matrix.python-version }}
-      - name: Install pytest
-        run: pip install pytest
+      - name: Install test requirements
+        run: python3 ./devscripts/install_deps.py --include test --include curl-cffi
       - name: Run tests
         continue-on-error: False
-        run: ./devscripts/run_tests.${{ matrix.run-tests-ext }} core
-        # Linter is in quick-test
+        run: |
+          python3 -m yt_dlp -v || true # Print debug head
+          python3 ./devscripts/run_tests.py core
.github/workflows/download.yml (vendored, 44 changed lines)
@@ -1,28 +1,48 @@
 name: Download Tests
 on: [push, pull_request]
+permissions:
+  contents: read
+
 jobs:
-  tests:
-    name: Download Tests
+  quick:
+    name: Quick Download Tests
     if: "contains(github.event.head_commit.message, 'ci run dl')"
+    runs-on: ubuntu-latest
+    steps:
+      - uses: actions/checkout@v4
+      - name: Set up Python
+        uses: actions/setup-python@v5
+        with:
+          python-version: 3.9
+      - name: Install test requirements
+        run: python3 ./devscripts/install_deps.py --include dev
+      - name: Run tests
+        continue-on-error: true
+        run: python3 ./devscripts/run_tests.py download
+
+  full:
+    name: Full Download Tests
+    if: "contains(github.event.head_commit.message, 'ci run dl all')"
     runs-on: ${{ matrix.os }}
     strategy:
       fail-fast: true
       matrix:
-        os: [ubuntu-18.04]
-        python-version: [3.7, 3.8, 3.9, 3.10-dev, pypy-3.6, pypy-3.7]
-        run-tests-ext: [sh]
+        os: [ubuntu-latest]
+        python-version: ['3.10', '3.11', '3.12', pypy-3.8, pypy-3.10]
         include:
+        # atleast one of each CPython/PyPy tests must be in windows
         - os: windows-latest
-          python-version: 3.6
-          run-tests-ext: bat
+          python-version: '3.8'
+        - os: windows-latest
+          python-version: pypy-3.9
     steps:
-      - uses: actions/checkout@v2
+      - uses: actions/checkout@v4
       - name: Set up Python ${{ matrix.python-version }}
-        uses: actions/setup-python@v2
+        uses: actions/setup-python@v5
        with:
          python-version: ${{ matrix.python-version }}
-      - name: Install pytest
-        run: pip install pytest
+      - name: Install test requirements
+        run: python3 ./devscripts/install_deps.py --include dev
       - name: Run tests
         continue-on-error: true
-        run: ./devscripts/run_tests.${{ matrix.run-tests-ext }} download
+        run: python3 ./devscripts/run_tests.py download
.github/workflows/quick-test.yml (vendored, 40 changed lines)
@@ -1,33 +1,39 @@
 name: Quick Test
 on: [push, pull_request]
+permissions:
+  contents: read
+
 jobs:
   tests:
     name: Core Test
     if: "!contains(github.event.head_commit.message, 'ci skip all')"
     runs-on: ubuntu-latest
     steps:
-      - uses: actions/checkout@v2
-      - name: Set up Python
-        uses: actions/setup-python@v2
+      - uses: actions/checkout@v4
+      - name: Set up Python 3.8
+        uses: actions/setup-python@v5
        with:
-          python-version: 3.9
+          python-version: '3.8'
       - name: Install test requirements
-        run: pip install pytest pycryptodomex
+        run: python3 ./devscripts/install_deps.py --include test
       - name: Run tests
-        run: ./devscripts/run_tests.sh core
-  flake8:
-    name: Linter
+        run: |
+          python3 -m yt_dlp -v || true
+          python3 ./devscripts/run_tests.py core
+  check:
+    name: Code check
     if: "!contains(github.event.head_commit.message, 'ci skip all')"
     runs-on: ubuntu-latest
     steps:
-      - uses: actions/checkout@v2
-      - name: Set up Python
-        uses: actions/setup-python@v2
+      - uses: actions/checkout@v4
+      - uses: actions/setup-python@v5
        with:
-          python-version: 3.9
-      - name: Install flake8
-        run: pip install flake8
+          python-version: '3.8'
+      - name: Install dev dependencies
+        run: python3 ./devscripts/install_deps.py -o --include static-analysis
       - name: Make lazy extractors
-        run: python devscripts/make_lazy_extractors.py
+        run: python3 ./devscripts/make_lazy_extractors.py
-      - name: Run flake8
-        run: flake8 .
+      - name: Run ruff
+        run: ruff check --output-format github .
+      - name: Run autopep8
+        run: autopep8 --diff .
.github/workflows/release-master.yml (vendored, new file, 29 lines)
@@ -0,0 +1,29 @@
name: Release (master)
on:
  push:
    branches:
      - master
    paths:
      - "yt_dlp/**.py"
      - "!yt_dlp/version.py"
      - "bundle/*.py"
      - "pyproject.toml"
      - "Makefile"
      - ".github/workflows/build.yml"
concurrency:
  group: release-master
permissions:
  contents: read

jobs:
  release:
    if: vars.BUILD_MASTER != ''
    uses: ./.github/workflows/release.yml
    with:
      prerelease: true
      source: master
    permissions:
      contents: write
      packages: write
      id-token: write # mandatory for trusted publishing
    secrets: inherit
.github/workflows/release-nightly.yml (vendored, new file, 42 lines)
@@ -0,0 +1,42 @@
name: Release (nightly)
on:
  schedule:
    - cron: '23 23 * * *'
permissions:
  contents: read

jobs:
  check_nightly:
    if: vars.BUILD_NIGHTLY != ''
    runs-on: ubuntu-latest
    outputs:
      commit: ${{ steps.check_for_new_commits.outputs.commit }}
    steps:
      - uses: actions/checkout@v4
        with:
          fetch-depth: 0
      - name: Check for new commits
        id: check_for_new_commits
        run: |
          relevant_files=(
            "yt_dlp/*.py"
            ':!yt_dlp/version.py'
            "bundle/*.py"
            "pyproject.toml"
            "Makefile"
            ".github/workflows/build.yml"
          )
          echo "commit=$(git log --format=%H -1 --since="24 hours ago" -- "${relevant_files[@]}")" | tee "$GITHUB_OUTPUT"

  release:
    needs: [check_nightly]
    if: ${{ needs.check_nightly.outputs.commit }}
    uses: ./.github/workflows/release.yml
    with:
      prerelease: true
      source: nightly
    permissions:
      contents: write
      packages: write
      id-token: write # mandatory for trusted publishing
    secrets: inherit
.github/workflows/release.yml (vendored, new file, 383 lines)
@@ -0,0 +1,383 @@
name: Release
on:
  workflow_call:
    inputs:
      prerelease:
        required: false
        default: true
        type: boolean
      source:
        required: false
        default: ''
        type: string
      target:
        required: false
        default: ''
        type: string
      version:
        required: false
        default: ''
        type: string
  workflow_dispatch:
    inputs:
      source:
        description: |
          SOURCE of this release's updates:
          channel, repo, tag, or channel/repo@tag
          (default: <current_repo>)
        required: false
        default: ''
        type: string
      target:
        description: |
          TARGET to publish this release to:
          channel, tag, or channel@tag
          (default: <source> if writable else <current_repo>[@source_tag])
        required: false
        default: ''
        type: string
      version:
        description: |
          VERSION: yyyy.mm.dd[.rev] or rev
          (default: auto-generated)
        required: false
        default: ''
        type: string
      prerelease:
        description: Pre-release
        default: false
        type: boolean

permissions:
  contents: read

jobs:
  prepare:
    permissions:
      contents: write
    runs-on: ubuntu-latest
    outputs:
      channel: ${{ steps.setup_variables.outputs.channel }}
      version: ${{ steps.setup_variables.outputs.version }}
      target_repo: ${{ steps.setup_variables.outputs.target_repo }}
      target_repo_token: ${{ steps.setup_variables.outputs.target_repo_token }}
      target_tag: ${{ steps.setup_variables.outputs.target_tag }}
      pypi_project: ${{ steps.setup_variables.outputs.pypi_project }}
      pypi_suffix: ${{ steps.setup_variables.outputs.pypi_suffix }}
      head_sha: ${{ steps.get_target.outputs.head_sha }}

    steps:
      - uses: actions/checkout@v4
        with:
          fetch-depth: 0

      - uses: actions/setup-python@v5
        with:
          python-version: "3.10"

      - name: Process inputs
        id: process_inputs
        run: |
          cat << EOF
          ::group::Inputs
          prerelease=${{ inputs.prerelease }}
          source=${{ inputs.source }}
          target=${{ inputs.target }}
          version=${{ inputs.version }}
          ::endgroup::
          EOF
          IFS='@' read -r source_repo source_tag <<<"${{ inputs.source }}"
          IFS='@' read -r target_repo target_tag <<<"${{ inputs.target }}"
          cat << EOF >> "$GITHUB_OUTPUT"
          source_repo=${source_repo}
          source_tag=${source_tag}
          target_repo=${target_repo}
          target_tag=${target_tag}
          EOF

      - name: Setup variables
        id: setup_variables
        env:
          source_repo: ${{ steps.process_inputs.outputs.source_repo }}
          source_tag: ${{ steps.process_inputs.outputs.source_tag }}
          target_repo: ${{ steps.process_inputs.outputs.target_repo }}
          target_tag: ${{ steps.process_inputs.outputs.target_tag }}
        run: |
          # unholy bash monstrosity (sincere apologies)
          fallback_token () {
            if ${{ !secrets.ARCHIVE_REPO_TOKEN }}; then
              echo "::error::Repository access secret ${target_repo_token^^} not found"
              exit 1
            fi
            target_repo_token=ARCHIVE_REPO_TOKEN
            return 0
          }

          source_is_channel=0
          [[ "${source_repo}" == 'stable' ]] && source_repo='yt-dlp/yt-dlp'
          if [[ -z "${source_repo}" ]]; then
            source_repo='${{ github.repository }}'
          elif [[ '${{ vars[format('{0}_archive_repo', env.source_repo)] }}' ]]; then
            source_is_channel=1
            source_channel='${{ vars[format('{0}_archive_repo', env.source_repo)] }}'
          elif [[ -z "${source_tag}" && "${source_repo}" != */* ]]; then
            source_tag="${source_repo}"
            source_repo='${{ github.repository }}'
          fi
          resolved_source="${source_repo}"
          if [[ "${source_tag}" ]]; then
            resolved_source="${resolved_source}@${source_tag}"
          elif [[ "${source_repo}" == 'yt-dlp/yt-dlp' ]]; then
|
||||||
|
resolved_source='stable'
|
||||||
|
fi
|
||||||
|
|
||||||
|
revision="${{ (inputs.prerelease || !vars.PUSH_VERSION_COMMIT) && '$(date -u +"%H%M%S")' || '' }}"
|
||||||
|
version="$(
|
||||||
|
python devscripts/update-version.py \
|
||||||
|
-c "${resolved_source}" -r "${{ github.repository }}" ${{ inputs.version || '$revision' }} | \
|
||||||
|
grep -Po "version=\K\d+\.\d+\.\d+(\.\d+)?")"
|
||||||
|
|
||||||
|
if [[ "${target_repo}" ]]; then
|
||||||
|
if [[ -z "${target_tag}" ]]; then
|
||||||
|
if [[ '${{ vars[format('{0}_archive_repo', env.target_repo)] }}' ]]; then
|
||||||
|
target_tag="${source_tag:-${version}}"
|
||||||
|
else
|
||||||
|
target_tag="${target_repo}"
|
||||||
|
target_repo='${{ github.repository }}'
|
||||||
|
fi
|
||||||
|
fi
|
||||||
|
if [[ "${target_repo}" != '${{ github.repository}}' ]]; then
|
||||||
|
target_repo='${{ vars[format('{0}_archive_repo', env.target_repo)] }}'
|
||||||
|
target_repo_token='${{ env.target_repo }}_archive_repo_token'
|
||||||
|
${{ !!secrets[format('{0}_archive_repo_token', env.target_repo)] }} || fallback_token
|
||||||
|
pypi_project='${{ vars[format('{0}_pypi_project', env.target_repo)] }}'
|
||||||
|
pypi_suffix='${{ vars[format('{0}_pypi_suffix', env.target_repo)] }}'
|
||||||
|
fi
|
||||||
|
else
|
||||||
|
target_tag="${source_tag:-${version}}"
|
||||||
|
if ((source_is_channel)); then
|
||||||
|
target_repo="${source_channel}"
|
||||||
|
target_repo_token='${{ env.source_repo }}_archive_repo_token'
|
||||||
|
${{ !!secrets[format('{0}_archive_repo_token', env.source_repo)] }} || fallback_token
|
||||||
|
pypi_project='${{ vars[format('{0}_pypi_project', env.source_repo)] }}'
|
||||||
|
pypi_suffix='${{ vars[format('{0}_pypi_suffix', env.source_repo)] }}'
|
||||||
|
else
|
||||||
|
target_repo='${{ github.repository }}'
|
||||||
|
fi
|
||||||
|
fi
|
||||||
|
|
||||||
|
if [[ "${target_repo}" == '${{ github.repository }}' ]] && ${{ !inputs.prerelease }}; then
|
||||||
|
pypi_project='${{ vars.PYPI_PROJECT }}'
|
||||||
|
fi
|
||||||
|
|
||||||
|
echo "::group::Output variables"
|
||||||
|
cat << EOF | tee -a "$GITHUB_OUTPUT"
|
||||||
|
channel=${resolved_source}
|
||||||
|
version=${version}
|
||||||
|
target_repo=${target_repo}
|
||||||
|
target_repo_token=${target_repo_token}
|
||||||
|
target_tag=${target_tag}
|
||||||
|
pypi_project=${pypi_project}
|
||||||
|
pypi_suffix=${pypi_suffix}
|
||||||
|
EOF
|
||||||
|
echo "::endgroup::"
|
||||||
|
|
||||||
|
- name: Update documentation
|
||||||
|
env:
|
||||||
|
version: ${{ steps.setup_variables.outputs.version }}
|
||||||
|
target_repo: ${{ steps.setup_variables.outputs.target_repo }}
|
||||||
|
if: |
|
||||||
|
!inputs.prerelease && env.target_repo == github.repository
|
||||||
|
run: |
|
||||||
|
python devscripts/update_changelog.py -vv
|
||||||
|
make doc
|
||||||
|
|
||||||
|
- name: Push to release
|
||||||
|
id: push_release
|
||||||
|
env:
|
||||||
|
version: ${{ steps.setup_variables.outputs.version }}
|
||||||
|
target_repo: ${{ steps.setup_variables.outputs.target_repo }}
|
||||||
|
if: |
|
||||||
|
!inputs.prerelease && env.target_repo == github.repository
|
||||||
|
run: |
|
||||||
|
git config --global user.name "github-actions[bot]"
|
||||||
|
git config --global user.email "41898282+github-actions[bot]@users.noreply.github.com"
|
||||||
|
git add -u
|
||||||
|
git commit -m "Release ${{ env.version }}" \
|
||||||
|
-m "Created by: ${{ github.event.sender.login }}" -m ":ci skip all :ci run dl"
|
||||||
|
git push origin --force ${{ github.event.ref }}:release
|
||||||
|
|
||||||
|
- name: Get target commitish
|
||||||
|
id: get_target
|
||||||
|
run: |
|
||||||
|
echo "head_sha=$(git rev-parse HEAD)" >> "$GITHUB_OUTPUT"
|
||||||
|
|
||||||
|
- name: Update master
|
||||||
|
env:
|
||||||
|
target_repo: ${{ steps.setup_variables.outputs.target_repo }}
|
||||||
|
if: |
|
||||||
|
vars.PUSH_VERSION_COMMIT != '' && !inputs.prerelease && env.target_repo == github.repository
|
||||||
|
run: git push origin ${{ github.event.ref }}
|
||||||
|
|
||||||
|
build:
|
||||||
|
needs: prepare
|
||||||
|
uses: ./.github/workflows/build.yml
|
||||||
|
with:
|
||||||
|
version: ${{ needs.prepare.outputs.version }}
|
||||||
|
channel: ${{ needs.prepare.outputs.channel }}
|
||||||
|
origin: ${{ needs.prepare.outputs.target_repo }}
|
||||||
|
permissions:
|
||||||
|
contents: read
|
||||||
|
packages: write # For package cache
|
||||||
|
secrets:
|
||||||
|
GPG_SIGNING_KEY: ${{ secrets.GPG_SIGNING_KEY }}
|
||||||
|
|
||||||
|
publish_pypi:
|
||||||
|
needs: [prepare, build]
|
||||||
|
if: ${{ needs.prepare.outputs.pypi_project }}
|
||||||
|
runs-on: ubuntu-latest
|
||||||
|
permissions:
|
||||||
|
id-token: write # mandatory for trusted publishing
|
||||||
|
|
||||||
|
steps:
|
||||||
|
- uses: actions/checkout@v4
|
||||||
|
with:
|
||||||
|
fetch-depth: 0
|
||||||
|
- uses: actions/setup-python@v5
|
||||||
|
with:
|
||||||
|
python-version: "3.10"
|
||||||
|
|
||||||
|
- name: Install Requirements
|
||||||
|
run: |
|
||||||
|
sudo apt -y install pandoc man
|
||||||
|
python devscripts/install_deps.py -o --include build
|
||||||
|
|
||||||
|
- name: Prepare
|
||||||
|
env:
|
||||||
|
version: ${{ needs.prepare.outputs.version }}
|
||||||
|
suffix: ${{ needs.prepare.outputs.pypi_suffix }}
|
||||||
|
channel: ${{ needs.prepare.outputs.channel }}
|
||||||
|
target_repo: ${{ needs.prepare.outputs.target_repo }}
|
||||||
|
pypi_project: ${{ needs.prepare.outputs.pypi_project }}
|
||||||
|
run: |
|
||||||
|
python devscripts/update-version.py -c "${{ env.channel }}" -r "${{ env.target_repo }}" -s "${{ env.suffix }}" "${{ env.version }}"
|
||||||
|
python devscripts/update_changelog.py -vv
|
||||||
|
python devscripts/make_lazy_extractors.py
|
||||||
|
sed -i -E '0,/(name = ")[^"]+(")/s//\1${{ env.pypi_project }}\2/' pyproject.toml
|
||||||
|
|
||||||
|
- name: Build
|
||||||
|
run: |
|
||||||
|
rm -rf dist/*
|
||||||
|
make pypi-files
|
||||||
|
printf '%s\n\n' \
|
||||||
|
'Official repository: <https://github.com/yt-dlp/yt-dlp>' \
|
||||||
|
'**PS**: Some links in this document will not work since this is a copy of the README.md from Github' > ./README.md.new
|
||||||
|
cat ./README.md >> ./README.md.new && mv -f ./README.md.new ./README.md
|
||||||
|
python devscripts/set-variant.py pip -M "You installed yt-dlp with pip or using the wheel from PyPi; Use that to update"
|
||||||
|
make clean-cache
|
||||||
|
python -m build --no-isolation .
|
||||||
|
|
||||||
|
- name: Publish to PyPI
|
||||||
|
uses: pypa/gh-action-pypi-publish@release/v1
|
||||||
|
with:
|
||||||
|
verbose: true
|
||||||
|
|
||||||
|
publish:
|
||||||
|
needs: [prepare, build]
|
||||||
|
permissions:
|
||||||
|
contents: write
|
||||||
|
runs-on: ubuntu-latest
|
||||||
|
|
||||||
|
steps:
|
||||||
|
- uses: actions/checkout@v4
|
||||||
|
with:
|
||||||
|
fetch-depth: 0
|
||||||
|
- uses: actions/download-artifact@v4
|
||||||
|
with:
|
||||||
|
path: artifact
|
||||||
|
pattern: build-*
|
||||||
|
merge-multiple: true
|
||||||
|
- uses: actions/setup-python@v5
|
||||||
|
with:
|
||||||
|
python-version: "3.10"
|
||||||
|
|
||||||
|
- name: Generate release notes
|
||||||
|
env:
|
||||||
|
head_sha: ${{ needs.prepare.outputs.head_sha }}
|
||||||
|
target_repo: ${{ needs.prepare.outputs.target_repo }}
|
||||||
|
target_tag: ${{ needs.prepare.outputs.target_tag }}
|
||||||
|
run: |
|
||||||
|
printf '%s' \
|
||||||
|
'[]' \
|
||||||
|
'(https://github.com/${{ github.repository }}#installation "Installation instructions") ' \
|
||||||
|
'[]' \
|
||||||
|
'(https://discord.gg/H5MNcFW63r "Discord") ' \
|
||||||
|
'[]' \
|
||||||
|
'(https://github.com/yt-dlp/yt-dlp/blob/master/Collaborators.md#collaborators "Donate") ' \
|
||||||
|
'[]' \
|
||||||
|
'(https://github.com/${{ github.repository }}' \
|
||||||
|
'${{ env.target_repo == github.repository && format('/tree/{0}', env.target_tag) || '' }}#readme "Documentation") ' \
|
||||||
|
${{ env.target_repo == 'yt-dlp/yt-dlp' && '\
|
||||||
|
"[]" \
|
||||||
|
"(https://github.com/yt-dlp/yt-dlp-nightly-builds/releases/latest \"Nightly builds\") " \
|
||||||
|
"[]" \
|
||||||
|
"(https://github.com/yt-dlp/yt-dlp-master-builds/releases/latest \"Master builds\")"' || '' }} > ./RELEASE_NOTES
|
||||||
|
printf '\n\n' >> ./RELEASE_NOTES
|
||||||
|
cat >> ./RELEASE_NOTES << EOF
|
||||||
|
#### A description of the various files are in the [README](https://github.com/${{ github.repository }}#release-files)
|
||||||
|
---
|
||||||
|
$(python ./devscripts/make_changelog.py -vv --collapsible)
|
||||||
|
EOF
|
||||||
|
printf '%s\n\n' '**This is a pre-release build**' >> ./PRERELEASE_NOTES
|
||||||
|
cat ./RELEASE_NOTES >> ./PRERELEASE_NOTES
|
||||||
|
printf '%s\n\n' 'Generated from: https://github.com/${{ github.repository }}/commit/${{ env.head_sha }}' >> ./ARCHIVE_NOTES
|
||||||
|
cat ./RELEASE_NOTES >> ./ARCHIVE_NOTES
|
||||||
|
|
||||||
|
- name: Publish to archive repo
|
||||||
|
env:
|
||||||
|
GH_TOKEN: ${{ secrets[needs.prepare.outputs.target_repo_token] }}
|
||||||
|
GH_REPO: ${{ needs.prepare.outputs.target_repo }}
|
||||||
|
version: ${{ needs.prepare.outputs.version }}
|
||||||
|
channel: ${{ needs.prepare.outputs.channel }}
|
||||||
|
if: |
|
||||||
|
inputs.prerelease && env.GH_TOKEN != '' && env.GH_REPO != '' && env.GH_REPO != github.repository
|
||||||
|
run: |
|
||||||
|
title="${{ startswith(env.GH_REPO, 'yt-dlp/') && 'yt-dlp ' || '' }}${{ env.channel }}"
|
||||||
|
gh release create \
|
||||||
|
--notes-file ARCHIVE_NOTES \
|
||||||
|
--title "${title} ${{ env.version }}" \
|
||||||
|
${{ env.version }} \
|
||||||
|
artifact/*
|
||||||
|
|
||||||
|
- name: Prune old release
|
||||||
|
env:
|
||||||
|
GH_TOKEN: ${{ github.token }}
|
||||||
|
version: ${{ needs.prepare.outputs.version }}
|
||||||
|
target_repo: ${{ needs.prepare.outputs.target_repo }}
|
||||||
|
target_tag: ${{ needs.prepare.outputs.target_tag }}
|
||||||
|
if: |
|
||||||
|
env.target_repo == github.repository && env.target_tag != env.version
|
||||||
|
run: |
|
||||||
|
gh release delete --yes --cleanup-tag "${{ env.target_tag }}" || true
|
||||||
|
git tag --delete "${{ env.target_tag }}" || true
|
||||||
|
sleep 5 # Enough time to cover deletion race condition
|
||||||
|
|
||||||
|
- name: Publish release
|
||||||
|
env:
|
||||||
|
GH_TOKEN: ${{ github.token }}
|
||||||
|
version: ${{ needs.prepare.outputs.version }}
|
||||||
|
target_repo: ${{ needs.prepare.outputs.target_repo }}
|
||||||
|
target_tag: ${{ needs.prepare.outputs.target_tag }}
|
||||||
|
head_sha: ${{ needs.prepare.outputs.head_sha }}
|
||||||
|
if: |
|
||||||
|
env.target_repo == github.repository
|
||||||
|
run: |
|
||||||
|
title="${{ github.repository == 'yt-dlp/yt-dlp' && 'yt-dlp ' || '' }}"
|
||||||
|
title+="${{ env.target_tag != env.version && format('{0} ', env.target_tag) || '' }}"
|
||||||
|
gh release create \
|
||||||
|
--notes-file ${{ inputs.prerelease && 'PRERELEASE_NOTES' || 'RELEASE_NOTES' }} \
|
||||||
|
--target ${{ env.head_sha }} \
|
||||||
|
--title "${title}${{ env.version }}" \
|
||||||
|
${{ inputs.prerelease && '--prerelease' || '' }} \
|
||||||
|
${{ env.target_tag }} \
|
||||||
|
artifact/*
|
||||||
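For readers unfamiliar with the `IFS='@' read -r …` lines in the `Process inputs` step above: this is plain bash word-splitting on `@`, used to break a `channel/repo@tag`-style input into its repo and tag halves. A small sketch of how such an input decomposes (the example values here are made up for illustration):

```shell
# Hypothetical inputs, mirroring the splitting done in the "Process inputs" step
input_source='yt-dlp/yt-dlp@2024.05.26'
IFS='@' read -r source_repo source_tag <<<"${input_source}"
echo "source_repo=${source_repo}"   # -> source_repo=yt-dlp/yt-dlp
echo "source_tag=${source_tag}"     # -> source_tag=2024.05.26

# With no "@" present, the tag half is simply empty
IFS='@' read -r target_repo target_tag <<<'nightly'
echo "target_repo=${target_repo}"   # -> target_repo=nightly
echo "target_tag=${target_tag}"     # -> target_tag=
```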
21  .gitignore  (vendored)
@@ -27,10 +27,13 @@ cookies
 *.ass
 *.avi
 *.desktop
+*.f4v
 *.flac
 *.flv
+*.gif
 *.jpeg
 *.jpg
+*.lrc
 *.m4a
 *.m4v
 *.mhtml
@@ -38,13 +41,18 @@ cookies
 *.mov
 *.mp3
 *.mp4
+*.mpg
+*.mpga
+*.oga
 *.ogg
 *.opus
 *.png
 *.sbv
 *.srt
+*.ssa
 *.swf
 *.swp
+*.tt
 *.ttml
 *.url
 *.vtt
@@ -59,7 +67,7 @@ cookies
 # Python
 *.pyc
 *.pyo
-.pytest_cache
+.*_cache
 wine-py2exe/
 py2exe.log
 build/
@@ -67,6 +75,7 @@ dist/
 zip/
 tmp/
 venv/
+.venv/
 completions/

 # Misc
@@ -82,6 +91,8 @@ updates_key.pem
 *.egg-info
 .tox
 *.class
+*.isorted
+*.stackdump

 # Generated
 AUTHORS
@@ -113,9 +124,5 @@ yt-dlp.zip
 */extractor/lazy_extractors.py

 # Plugins
-ytdlp_plugins/extractor/*
-!ytdlp_plugins/extractor/__init__.py
-!ytdlp_plugins/extractor/sample.py
-ytdlp_plugins/postprocessor/*
-!ytdlp_plugins/postprocessor/__init__.py
-!ytdlp_plugins/postprocessor/sample.py
+ytdlp_plugins/
+yt-dlp-plugins
14  .pre-commit-config.yaml  (new file)
@@ -0,0 +1,14 @@
repos:
  - repo: local
    hooks:
      - id: linter
        name: Apply linter fixes
        entry: ruff check --fix .
        language: system
        types: [python]
        require_serial: true
      - id: format
        name: Apply formatting fixes
        entry: autopep8 --in-place .
        language: system
        types: [python]
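These are `repo: local` hooks, so `pre-commit` only dispatches to tools already available in the environment (`ruff` and `autopep8`); per the CONTRIBUTING.md changes further below, `hatch run setup` is what installs the hook for contributors. A short usage sketch, assuming `pre-commit` itself is installed (e.g. via `pip install pre-commit`):

```shell
# Register the hooks from .pre-commit-config.yaml into .git/hooks/pre-commit
$ pre-commit install

# Run the configured hooks against the whole tree instead of just staged files
$ pre-commit run --all-files
```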
9  .pre-commit-hatch.yaml  (new file)
@@ -0,0 +1,9 @@
repos:
  - repo: local
    hooks:
      - id: fix
        name: Apply code fixes
        entry: hatch fmt
        language: system
        types: [python]
        require_serial: true
265
CONTRIBUTING.md
265
CONTRIBUTING.md
@@ -79,7 +79,7 @@ Before reporting any issue, type `yt-dlp -U`. This should report that you're up-
|
|||||||
|
|
||||||
### Is the issue already documented?
|
### Is the issue already documented?
|
||||||
|
|
||||||
Make sure that someone has not already opened the issue you're trying to open. Search at the top of the window or browse the [GitHub Issues](https://github.com/yt-dlp/yt-dlp/search?type=Issues) of this repository. If there is an issue, feel free to write something along the lines of "This affects me as well, with version 2021.01.01. Here is some more information on the issue: ...". While some issues may be old, a new post into them often spurs rapid activity.
|
Make sure that someone has not already opened the issue you're trying to open. Search at the top of the window or browse the [GitHub Issues](https://github.com/yt-dlp/yt-dlp/search?type=Issues) of this repository. If there is an issue, subscribe to it to be notified when there is any progress. Unless you have something useful to add to the conversation, please refrain from commenting.
|
||||||
|
|
||||||
Additionally, it is also helpful to see if the issue has already been documented in the [youtube-dl issue tracker](https://github.com/ytdl-org/youtube-dl/issues). If similar issues have already been reported in youtube-dl (but not in our issue tracker), links to them can be included in your issue report here.
|
Additionally, it is also helpful to see if the issue has already been documented in the [youtube-dl issue tracker](https://github.com/ytdl-org/youtube-dl/issues). If similar issues have already been reported in youtube-dl (but not in our issue tracker), links to them can be included in your issue report here.
|
||||||
|
|
||||||
@@ -127,58 +127,93 @@ While these steps won't necessarily ensure that no misuse of the account takes p
|
|||||||
|
|
||||||
### Is the website primarily used for piracy?
|
### Is the website primarily used for piracy?
|
||||||
|
|
||||||
We follow [youtube-dl's policy](https://github.com/ytdl-org/youtube-dl#can-you-add-support-for-this-anime-video-site-or-site-which-shows-current-movies-for-free) to not support services that is primarily used for infringing copyright. Additionally, it has been decided to not to support porn sites that specialize in deep fake. We also cannot support any service that serves only [DRM protected content](https://en.wikipedia.org/wiki/Digital_rights_management).
|
We follow [youtube-dl's policy](https://github.com/ytdl-org/youtube-dl#can-you-add-support-for-this-anime-video-site-or-site-which-shows-current-movies-for-free) to not support services that is primarily used for infringing copyright. Additionally, it has been decided to not to support porn sites that specialize in fakes. We also cannot support any service that serves only [DRM protected content](https://en.wikipedia.org/wiki/Digital_rights_management).
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
# DEVELOPER INSTRUCTIONS
|
# DEVELOPER INSTRUCTIONS
|
||||||
|
|
||||||
Most users do not need to build yt-dlp and can [download the builds](https://github.com/yt-dlp/yt-dlp/releases) or get them via [the other installation methods](README.md#installation).
|
Most users do not need to build yt-dlp and can [download the builds](https://github.com/yt-dlp/yt-dlp/releases), get them via [the other installation methods](README.md#installation) or directly run it using `python -m yt_dlp`.
|
||||||
|
|
||||||
To run yt-dlp as a developer, you don't need to build anything either. Simply execute
|
`yt-dlp` uses [`hatch`](<https://hatch.pypa.io>) as a project management tool.
|
||||||
|
You can easily install it using [`pipx`](<https://pipx.pypa.io>) via `pipx install hatch`, or else via `pip` or your package manager of choice. Make sure you are using at least version `1.10.0`, otherwise some functionality might not work as expected.
|
||||||
|
|
||||||
python -m yt_dlp
|
If you plan on contributing to `yt-dlp`, best practice is to start by running the following command:
|
||||||
|
|
||||||
To run the test, simply invoke your favorite test runner, or execute a test file directly; any of the following work:
|
```shell
|
||||||
|
$ hatch run setup
|
||||||
|
```
|
||||||
|
|
||||||
python -m unittest discover
|
The above command will install a `pre-commit` hook so that required checks/fixes (linting, formatting) will run automatically before each commit. If any code needs to be linted or formatted, then the commit will be blocked and the necessary changes will be made; you should review all edits and re-commit the fixed version.
|
||||||
python test/test_download.py
|
|
||||||
nosetests
|
After this you can use `hatch shell` to enable a virtual environment that has `yt-dlp` and its development dependencies installed.
|
||||||
pytest
|
|
||||||
|
In addition, the following script commands can be used to run simple tasks such as linting or testing (without having to run `hatch shell` first):
|
||||||
|
* `hatch fmt`: Automatically fix linter violations and apply required code formatting changes
|
||||||
|
* See `hatch fmt --help` for more info
|
||||||
|
* `hatch test`: Run extractor or core tests
|
||||||
|
* See `hatch test --help` for more info
|
||||||
|
|
||||||
See item 6 of [new extractor tutorial](#adding-support-for-a-new-site) for how to run extractor specific test cases.
|
See item 6 of [new extractor tutorial](#adding-support-for-a-new-site) for how to run extractor specific test cases.
|
||||||
|
|
||||||
|
While it is strongly recommended to use `hatch` for yt-dlp development, if you are unable to do so, alternatively you can manually create a virtual environment and use the following commands:
|
||||||
|
|
||||||
|
```shell
|
||||||
|
# To only install development dependencies:
|
||||||
|
$ python -m devscripts.install_deps --include dev
|
||||||
|
|
||||||
|
# Or, for an editable install plus dev dependencies:
|
||||||
|
$ python -m pip install -e ".[default,dev]"
|
||||||
|
|
||||||
|
# To setup the pre-commit hook:
|
||||||
|
$ pre-commit install
|
||||||
|
|
||||||
|
# To be used in place of `hatch test`:
|
||||||
|
$ python -m devscripts.run_tests
|
||||||
|
|
||||||
|
# To be used in place of `hatch fmt`:
|
||||||
|
$ ruff check --fix .
|
||||||
|
$ autopep8 --in-place .
|
||||||
|
|
||||||
|
# To only check code instead of applying fixes:
|
||||||
|
$ ruff check .
|
||||||
|
$ autopep8 --diff .
|
||||||
|
```
|
||||||
|
|
||||||
If you want to create a build of yt-dlp yourself, you can follow the instructions [here](README.md#compile).
|
If you want to create a build of yt-dlp yourself, you can follow the instructions [here](README.md#compile).
|
||||||
|
|
||||||
|
|
||||||
## Adding new feature or making overarching changes
|
## Adding new feature or making overarching changes
|
||||||
|
|
||||||
Before you start writing code for implementing a new feature, open an issue explaining your feature request and atleast one use case. This allows the maintainers to decide whether such a feature is desired for the project in the first place, and will provide an avenue to discuss some implementation details. If you open a pull request for a new feature without discussing with us first, do not be surprised when we ask for large changes to the code, or even reject it outright.
|
Before you start writing code for implementing a new feature, open an issue explaining your feature request and at least one use case. This allows the maintainers to decide whether such a feature is desired for the project in the first place, and will provide an avenue to discuss some implementation details. If you open a pull request for a new feature without discussing with us first, do not be surprised when we ask for large changes to the code, or even reject it outright.
|
||||||
|
|
||||||
The same applies for changes to the documentation, code style, or overarching changes to the architecture
|
The same applies for changes to the documentation, code style, or overarching changes to the architecture
|
||||||
|
|
||||||
|
|
||||||
## Adding support for a new site
|
## Adding support for a new site
|
||||||
|
|
||||||
If you want to add support for a new site, first of all **make sure** this site is **not dedicated to [copyright infringement](https://www.github.com/ytdl-org/youtube-dl#can-you-add-support-for-this-anime-video-site-or-site-which-shows-current-movies-for-free)**. yt-dlp does **not support** such sites thus pull requests adding support for them **will be rejected**.
|
If you want to add support for a new site, first of all **make sure** this site is **not dedicated to [copyright infringement](#is-the-website-primarily-used-for-piracy)**. yt-dlp does **not support** such sites thus pull requests adding support for them **will be rejected**.
|
||||||
|
|
||||||
After you have ensured this site is distributing its content legally, you can follow this quick list (assuming your service is called `yourextractor`):
|
After you have ensured this site is distributing its content legally, you can follow this quick list (assuming your service is called `yourextractor`):
|
||||||
|
|
||||||
1. [Fork this repository](https://github.com/yt-dlp/yt-dlp/fork)
|
1. [Fork this repository](https://github.com/yt-dlp/yt-dlp/fork)
|
||||||
1. Check out the source code with:
|
1. Check out the source code with:
|
||||||
|
|
||||||
git clone git@github.com:YOUR_GITHUB_USERNAME/yt-dlp.git
|
```shell
|
||||||
|
$ git clone git@github.com:YOUR_GITHUB_USERNAME/yt-dlp.git
|
||||||
|
```
|
||||||
|
|
||||||
1. Start a new git branch with
|
1. Start a new git branch with
|
||||||
|
|
||||||
cd yt-dlp
|
```shell
|
||||||
git checkout -b yourextractor
|
$ cd yt-dlp
|
||||||
|
$ git checkout -b yourextractor
|
||||||
|
```
|
||||||
|
|
||||||
1. Start with this simple template and save it to `yt_dlp/extractor/yourextractor.py`:
|
1. Start with this simple template and save it to `yt_dlp/extractor/yourextractor.py`:
|
||||||
|
|
||||||
```python
|
```python
|
||||||
# coding: utf-8
|
|
||||||
from .common import InfoExtractor
|
from .common import InfoExtractor
|
||||||
|
|
||||||
|
|
||||||
@@ -188,15 +223,21 @@ After you have ensured this site is distributing its content legally, you can fo
|
|||||||
'url': 'https://yourextractor.com/watch/42',
|
'url': 'https://yourextractor.com/watch/42',
|
||||||
'md5': 'TODO: md5 sum of the first 10241 bytes of the video file (use --test)',
|
'md5': 'TODO: md5 sum of the first 10241 bytes of the video file (use --test)',
|
||||||
'info_dict': {
|
'info_dict': {
|
||||||
|
# For videos, only the 'id' and 'ext' fields are required to RUN the test:
|
||||||
'id': '42',
|
'id': '42',
|
||||||
'ext': 'mp4',
|
'ext': 'mp4',
|
||||||
'title': 'Video title goes here',
|
# Then if the test run fails, it will output the missing/incorrect fields.
|
||||||
'thumbnail': r're:^https?://.*\.jpg$',
|
# Properties can be added as:
|
||||||
# TODO more properties, either as:
|
# * A value, e.g.
|
||||||
# * A value
|
# 'title': 'Video title goes here',
|
||||||
# * MD5 checksum; start the string with md5:
|
# * MD5 checksum; start the string with 'md5:', e.g.
|
||||||
# * A regular expression; start the string with re:
|
# 'description': 'md5:098f6bcd4621d373cade4e832627b4f6',
|
||||||
# * Any Python type (for example int or float)
|
# * A regular expression; start the string with 're:', e.g.
|
||||||
|
# 'thumbnail': r're:^https?://.*\.jpg$',
|
||||||
|
# * A count of elements in a list; start the string with 'count:', e.g.
|
||||||
|
# 'tags': 'count:10',
|
||||||
|
# * Any Python type, e.g.
|
||||||
|
# 'view_count': int,
|
||||||
}
|
}
|
||||||
}]
|
}]
|
||||||
|
|
||||||
@@ -215,27 +256,33 @@ After you have ensured this site is distributing its content legally, you can fo
|
|||||||
# TODO more properties (see yt_dlp/extractor/common.py)
|
# TODO more properties (see yt_dlp/extractor/common.py)
|
||||||
}
|
}
|
||||||
```
|
```
|
||||||
1. Add an import in [`yt_dlp/extractor/extractors.py`](yt_dlp/extractor/extractors.py).
|
1. Add an import in [`yt_dlp/extractor/_extractors.py`](yt_dlp/extractor/_extractors.py). Note that the class name must end with `IE`. Also note that when adding a parenthesized import group, the last import in the group must have a trailing comma in order for this formatting to be respected by our code formatter.
|
||||||
1. Run `python test/test_download.py TestDownload.test_YourExtractor` (note that `YourExtractor` doesn't end with `IE`). This *should fail* at first, but you can continually re-run it until you're done. If you decide to add more than one test, the tests will then be named `TestDownload.test_YourExtractor`, `TestDownload.test_YourExtractor_1`, `TestDownload.test_YourExtractor_2`, etc. Note that tests with `only_matching` key in test's dict are not counted in. You can also run all the tests in one go with `TestDownload.test_YourExtractor_all`
|
1. Run `hatch test YourExtractor`. This *may fail* at first, but you can continually re-run it until you're done. Upon failure, it will output the missing fields and/or correct values which you can copy. If you decide to add more than one test, the tests will then be named `YourExtractor`, `YourExtractor_1`, `YourExtractor_2`, etc. Note that tests with an `only_matching` key in the test's dict are not included in the count. You can also run all the tests in one go with `YourExtractor_all`
|
||||||
1. Make sure you have atleast one test for your extractor. Even if all videos covered by the extractor are expected to be inaccessible for automated testing, tests should still be added with a `skip` parameter indicating why the particular test is disabled from running.
|
1. Make sure you have at least one test for your extractor. Even if all videos covered by the extractor are expected to be inaccessible for automated testing, tests should still be added with a `skip` parameter indicating why the particular test is disabled from running.
|
||||||
1. Have a look at [`yt_dlp/extractor/common.py`](yt_dlp/extractor/common.py) for possible helper methods and a [detailed description of what your extractor should and may return](yt_dlp/extractor/common.py#L91-L426). Add tests and code for as many as you want.
|
1. Have a look at [`yt_dlp/extractor/common.py`](yt_dlp/extractor/common.py) for possible helper methods and a [detailed description of what your extractor should and may return](yt_dlp/extractor/common.py#L119-L440). Add tests and code for as many as you want.
|
||||||
1. Make sure your code follows [yt-dlp coding conventions](#yt-dlp-coding-conventions) and check the code with [flake8](https://flake8.pycqa.org/en/latest/index.html#quickstart):
|
1. Make sure your code follows [yt-dlp coding conventions](#yt-dlp-coding-conventions), passes [ruff](https://docs.astral.sh/ruff/tutorial/#getting-started) code checks and is properly formatted:
|
||||||
|
|
||||||
$ flake8 yt_dlp/extractor/yourextractor.py
|
```shell
|
||||||
|
$ hatch fmt --check
|
||||||
|
```
|
||||||
|
|
||||||
1. Make sure your code works under all [Python](https://www.python.org/) versions supported by yt-dlp, namely CPython and PyPy for Python 3.6 and above. Backward compatibility is not required for even older versions of Python.
|
You can use `hatch fmt` to automatically fix problems.
|
||||||
|
|
||||||
|
1. Make sure your code works under all [Python](https://www.python.org/) versions supported by yt-dlp, namely CPython and PyPy for Python 3.8 and above. Backward compatibility is not required for even older versions of Python.
|
||||||
1. When the tests pass, [add](https://git-scm.com/docs/git-add) the new files, [commit](https://git-scm.com/docs/git-commit) them and [push](https://git-scm.com/docs/git-push) the result, like this:
|
1. When the tests pass, [add](https://git-scm.com/docs/git-add) the new files, [commit](https://git-scm.com/docs/git-commit) them and [push](https://git-scm.com/docs/git-push) the result, like this:
|
||||||
|
|
||||||
$ git add yt_dlp/extractor/extractors.py
|
```shell
|
||||||
$ git add yt_dlp/extractor/yourextractor.py
|
$ git add yt_dlp/extractor/_extractors.py
|
||||||
$ git commit -m '[yourextractor] Add extractor'
|
$ git add yt_dlp/extractor/yourextractor.py
|
||||||
$ git push origin yourextractor
|
$ git commit -m '[yourextractor] Add extractor'
|
||||||
|
$ git push origin yourextractor
|
||||||
|
```
|
||||||
|
|
||||||
1. Finally, [create a pull request](https://help.github.com/articles/creating-a-pull-request). We'll then review and merge it.
|
1. Finally, [create a pull request](https://help.github.com/articles/creating-a-pull-request). We'll then review and merge it.
|
||||||
|
|
||||||
In any case, thank you very much for your contributions!
|
In any case, thank you very much for your contributions!
|
||||||
|
|
||||||
**Tip:** To test extractors that require login information, create a file `test/local_parameters.json` and add `"usenetrc": true` or your username and password in it:
|
**Tip:** To test extractors that require login information, create a file `test/local_parameters.json` and add `"usenetrc": true` or your `username`&`password` or `cookiefile`/`cookiesfrombrowser` in it:
|
||||||
```json
|
```json
|
||||||
{
|
{
|
||||||
"username": "your user name",
|
"username": "your user name",
|
||||||
@@ -247,12 +294,12 @@ In any case, thank you very much for your contributions!
|
|||||||
|
|
||||||
This section introduces a guide lines for writing idiomatic, robust and future-proof extractor code.
|
This section introduces a guide lines for writing idiomatic, robust and future-proof extractor code.
|
||||||
|
|
||||||
Extractors are very fragile by nature since they depend on the layout of the source data provided by 3rd party media hosters out of your control and this layout tends to change. As an extractor implementer your task is not only to write code that will extract media links and metadata correctly but also to minimize dependency on the source's layout and even to make the code foresee potential future changes and be ready for that. This is important because it will allow the extractor not to break on minor layout changes thus keeping old yt-dlp versions working. Even though this breakage issue may be easily fixed by a new version of yt-dlp, this could take some time, during which the the extractor will remain broken.
|
Extractors are very fragile by nature since they depend on the layout of the source data provided by 3rd party media hosters out of your control and this layout tends to change. As an extractor implementer your task is not only to write code that will extract media links and metadata correctly but also to minimize dependency on the source's layout and even to make the code foresee potential future changes and be ready for that. This is important because it will allow the extractor not to break on minor layout changes thus keeping old yt-dlp versions working. Even though this breakage issue may be easily fixed by a new version of yt-dlp, this could take some time, during which the extractor will remain broken.
|
||||||
|
|
||||||
|
|
||||||
### Mandatory and optional metafields
|
### Mandatory and optional metafields
|
||||||
|
|
||||||
For extraction to work yt-dlp relies on metadata your extractor extracts and provides to yt-dlp expressed by an [information dictionary](yt_dlp/extractor/common.py#L91-L426) or simply *info dict*. Only the following meta fields in the *info dict* are considered mandatory for a successful extraction process by yt-dlp:
|
For extraction to work yt-dlp relies on metadata your extractor extracts and provides to yt-dlp expressed by an [information dictionary](yt_dlp/extractor/common.py#L119-L440) or simply *info dict*. Only the following meta fields in the *info dict* are considered mandatory for a successful extraction process by yt-dlp:
|
||||||
|
|
||||||
- `id` (media identifier)
|
- `id` (media identifier)
|
||||||
- `title` (media title)
|
- `title` (media title)
|
||||||
@@ -262,7 +309,7 @@ The aforementioned metafields are the critical data that the extraction does not
|
|||||||
|
|
||||||
For pornographic sites, appropriate `age_limit` must also be returned.
|
For pornographic sites, appropriate `age_limit` must also be returned.
|
||||||
|
|
||||||
The extractor is allowed to return the info dict without url or formats in some special cases if it allows the user to extract usefull information with `--ignore-no-formats-error` - Eg: when the video is a live stream that has not started yet.
|
The extractor is allowed to return the info dict without url or formats in some special cases if it allows the user to extract useful information with `--ignore-no-formats-error` - e.g. when the video is a live stream that has not started yet.
|
||||||
|
|
||||||
[Any field](yt_dlp/extractor/common.py#219-L426) apart from the aforementioned ones are considered **optional**. That means that extraction should be **tolerant** to situations when sources for these fields can potentially be unavailable (even if they are always available at the moment) and **future-proof** in order not to break the extraction of general purpose mandatory fields.
|
[Any field](yt_dlp/extractor/common.py#219-L426) apart from the aforementioned ones are considered **optional**. That means that extraction should be **tolerant** to situations when sources for these fields can potentially be unavailable (even if they are always available at the moment) and **future-proof** in order not to break the extraction of general purpose mandatory fields.
|
||||||
|
|
||||||
@@ -301,14 +348,10 @@ description = meta['summary'] # incorrect
|
|||||||
The latter will break extraction process with `KeyError` if `summary` disappears from `meta` at some later time but with the former approach extraction will just go ahead with `description` set to `None` which is perfectly fine (remember `None` is equivalent to the absence of data).
|
The latter will break extraction process with `KeyError` if `summary` disappears from `meta` at some later time but with the former approach extraction will just go ahead with `description` set to `None` which is perfectly fine (remember `None` is equivalent to the absence of data).
|
||||||
|
|
||||||
|
|
||||||
If the data is nested, do not use `.get` chains, but instead make use of the utility functions `try_get` or `traverse_obj`
|
If the data is nested, do not use `.get` chains, but instead make use of `traverse_obj`.
|
||||||
|
|
||||||
Considering the above `meta` again, assume you want to extract `["user"]["name"]` and put it in the resulting info dict as `uploader`
|
Considering the above `meta` again, assume you want to extract `["user"]["name"]` and put it in the resulting info dict as `uploader`
|
||||||
|
|
||||||
```python
|
|
||||||
uploader = try_get(meta, lambda x: x['user']['name']) # correct
|
|
||||||
```
|
|
||||||
or
|
|
||||||
```python
|
```python
|
||||||
uploader = traverse_obj(meta, ('user', 'name')) # correct
|
uploader = traverse_obj(meta, ('user', 'name')) # correct
|
||||||
```
|
```
|
||||||
@@ -322,6 +365,10 @@ or
|
|||||||
```python
|
```python
|
||||||
uploader = meta.get('user', {}).get('name') # incorrect
|
uploader = meta.get('user', {}).get('name') # incorrect
|
||||||
```
|
```
|
||||||
|
or
|
||||||
|
```python
|
||||||
|
uploader = try_get(meta, lambda x: x['user']['name']) # old utility
|
||||||
|
```
|
||||||
|
|
||||||
|
|
||||||
Similarly, you should pass `fatal=False` when extracting optional data from a webpage with `_search_regex`, `_html_search_regex` or similar methods, for instance:
|
Similarly, you should pass `fatal=False` when extracting optional data from a webpage with `_search_regex`, `_html_search_regex` or similar methods, for instance:
|
||||||
@@ -347,26 +394,42 @@ On failure this code will silently continue the extraction with `description` se
|
|||||||
|
|
||||||
Another thing to remember is not to try to iterate over `None`
|
Another thing to remember is not to try to iterate over `None`
|
||||||
|
|
||||||
Say you extracted a list of thumbnails into `thumbnail_data` using `try_get` and now want to iterate over them
|
Say you extracted a list of thumbnails into `thumbnail_data` and want to iterate over them
|
||||||
|
|
||||||
```python
|
```python
|
||||||
thumbnail_data = try_get(...)
|
thumbnail_data = data.get('thumbnails') or []
|
||||||
thumbnails = [{
|
thumbnails = [{
|
||||||
'url': item['url']
|
'url': item['url'],
|
||||||
} for item in thumbnail_data or []] # correct
|
'height': item.get('h'),
|
||||||
|
} for item in thumbnail_data if item.get('url')] # correct
|
||||||
```
|
```
|
||||||
|
|
||||||
and not like:
|
and not like:
|
||||||
|
|
||||||
```python
|
```python
|
||||||
thumbnail_data = try_get(...)
|
thumbnail_data = data.get('thumbnails')
|
||||||
thumbnails = [{
|
thumbnails = [{
|
||||||
'url': item['url']
|
'url': item['url'],
|
||||||
|
'height': item.get('h'),
|
||||||
} for item in thumbnail_data] # incorrect
|
} for item in thumbnail_data] # incorrect
|
||||||
```
|
```
|
||||||
|
|
||||||
In the later case, `thumbnail_data` will be `None` if the field was not found and this will cause the loop `for item in thumbnail_data` to raise a fatal error. Using `for item in thumbnail_data or []` avoids this error and results in setting an empty list in `thumbnails` instead.
|
In this case, `thumbnail_data` will be `None` if the field was not found and this will cause the loop `for item in thumbnail_data` to raise a fatal error. Using `or []` avoids this error and results in setting an empty list in `thumbnails` instead.
|
||||||
|
|
||||||
|
Alternately, this can be further simplified by using `traverse_obj`
|
||||||
|
|
||||||
|
```python
|
||||||
|
thumbnails = [{
|
||||||
|
'url': item['url'],
|
||||||
|
'height': item.get('h'),
|
||||||
|
} for item in traverse_obj(data, ('thumbnails', lambda _, v: v['url']))]
|
||||||
|
```
|
||||||
|
|
||||||
|
or, even better,
|
||||||
|
|
||||||
|
```python
|
||||||
|
thumbnails = traverse_obj(data, ('thumbnails', ..., {'url': 'url', 'height': 'h'}))
|
||||||
|
```
|
||||||
|
|
||||||
### Provide fallbacks
|
### Provide fallbacks
|
||||||
|
|
||||||
@@ -375,21 +438,21 @@ When extracting metadata try to do so from multiple sources. For example if `tit
|
|||||||
|
|
||||||
#### Example
|
#### Example
|
||||||
|
|
||||||
Say `meta` from the previous example has a `title` and you are about to extract it. Since `title` is a mandatory meta field you should end up with something like:
|
Say `meta` from the previous example has a `title` and you are about to extract it like:
|
||||||
|
|
||||||
```python
|
```python
|
||||||
title = meta['title']
|
title = meta.get('title')
|
||||||
```
|
```
|
||||||
|
|
||||||
If `title` disappears from `meta` in future due to some changes on the hoster's side the extraction would fail since `title` is mandatory. That's expected.
|
If `title` disappears from `meta` in future due to some changes on the hoster's side the title extraction would fail.
|
||||||
|
|
||||||
Assume that you have some another source you can extract `title` from, for example `og:title` HTML meta of a `webpage`. In this case you can provide a fallback scenario:
|
Assume that you have some another source you can extract `title` from, for example `og:title` HTML meta of a `webpage`. In this case you can provide a fallback like:
|
||||||
|
|
||||||
```python
|
```python
|
||||||
title = meta.get('title') or self._og_search_title(webpage)
|
title = meta.get('title') or self._og_search_title(webpage)
|
||||||
```
|
```
|
||||||
|
|
||||||
This code will try to extract from `meta` first and if it fails it will try extracting `og:title` from a `webpage`.
|
This code will try to extract from `meta` first and if it fails it will try extracting `og:title` from a `webpage`, making the extractor more robust.
|
||||||
|
|
||||||
|
|
||||||
### Regular expressions
|
### Regular expressions
|
||||||
@@ -432,7 +495,7 @@ title = self._search_regex( # correct
|
|||||||
r'<span[^>]+class="title"[^>]*>([^<]+)', webpage, 'title')
|
r'<span[^>]+class="title"[^>]*>([^<]+)', webpage, 'title')
|
||||||
```
|
```
|
||||||
|
|
||||||
Or even better:
|
which tolerates potential changes in the `style` attribute's value. Or even better:
|
||||||
|
|
||||||
```python
|
```python
|
||||||
title = self._search_regex( # correct
|
title = self._search_regex( # correct
|
||||||
@@ -440,7 +503,7 @@ title = self._search_regex( # correct
|
|||||||
webpage, 'title', group='title')
|
webpage, 'title', group='title')
|
||||||
```
|
```
|
||||||
|
|
||||||
Note how you tolerate potential changes in the `style` attribute's value or switch from using double quotes to single for `class` attribute:
|
which also handles both single quotes in addition to double quotes.
|
||||||
|
|
||||||
The code definitely should not look like:
|
The code definitely should not look like:
|
||||||
|
|
||||||
@@ -458,7 +521,42 @@ title = self._search_regex( # incorrect
|
|||||||
webpage, 'title', group='title')
|
webpage, 'title', group='title')
|
||||||
```
|
```
|
||||||
|
|
||||||
Here the presence or absence of other attributes including `style` is irrelevent for the data we need, and so the regex must not depend on it
|
Here the presence or absence of other attributes including `style` is irrelevant for the data we need, and so the regex must not depend on it
|
||||||
|
|
||||||
|
|
||||||
|
#### Keep the regular expressions as simple as possible, but no simpler
|
||||||
|
|
||||||
|
Since many extractors deal with unstructured data provided by websites, we will often need to use very complex regular expressions. You should try to use the *simplest* regex that can accomplish what you want. In other words, each part of the regex must have a reason for existing. If you can take out a symbol and the functionality does not change, the symbol should not be there.
|
||||||
|
|
||||||
|
##### Example
|
||||||
|
|
||||||
|
Correct:
|
||||||
|
|
||||||
|
```python
|
||||||
|
_VALID_URL = r'https?://(?:www\.)?website\.com/(?:[^/]+/){3,4}(?P<display_id>[^/]+)_(?P<id>\d+)'
|
||||||
|
```
|
||||||
|
|
||||||
|
Incorrect:
|
||||||
|
|
||||||
|
```python
|
||||||
|
_VALID_URL = r'https?:\/\/(?:www\.)?website\.com\/[^\/]+/[^\/]+/[^\/]+(?:\/[^\/]+)?\/(?P<display_id>[^\/]+)_(?P<id>\d+)'
|
||||||
|
```
|
||||||
|
|
||||||
|
#### Do not misuse `.` and use the correct quantifiers (`+*?`)
|
||||||
|
|
||||||
|
Avoid creating regexes that over-match because of wrong use of quantifiers. Also try to avoid non-greedy matching (`?`) where possible since they could easily result in [catastrophic backtracking](https://www.regular-expressions.info/catastrophic.html)
|
||||||
|
|
||||||
|
Correct:
|
||||||
|
|
||||||
|
```python
|
||||||
|
title = self._search_regex(r'<span\b[^>]+class="title"[^>]*>([^<]+)', webpage, 'title')
|
||||||
|
```
|
||||||
|
|
||||||
|
Incorrect:
|
||||||
|
|
||||||
|
```python
|
||||||
|
title = self._search_regex(r'<span\b.*class="title".*>(.+?)<', webpage, 'title')
|
||||||
|
```
|
||||||
|
|
||||||
|
|
||||||
### Long lines policy
|
### Long lines policy
|
||||||
@@ -467,7 +565,7 @@ There is a soft limit to keep lines of code under 100 characters long. This mean
|
|||||||
|
|
||||||
For example, you should **never** split long string literals like URLs or some other often copied entities over multiple lines to fit this limit:
|
For example, you should **never** split long string literals like URLs or some other often copied entities over multiple lines to fit this limit:
|
||||||
|
|
||||||
Conversely, don't unecessarily split small lines further. As a rule of thumb, if removing the line split keeps the code under 80 characters, it should be a single line.
|
Conversely, don't unnecessarily split small lines further. As a rule of thumb, if removing the line split keeps the code under 80 characters, it should be a single line.
|
||||||
|
|
||||||
##### Examples
|
##### Examples
|
||||||
|
|
||||||
@@ -522,19 +620,22 @@ formats = self._extract_m3u8_formats(m3u8_url,
|
|||||||
|
|
||||||
### Quotes
|
### Quotes
|
||||||
|
|
||||||
Always use single quotes for strings (even if the string has `'`) and double quotes for docstrings. Use `'''` only for multi-line strings. An exception can be made if a string has multiple single quotes in it and escaping makes it significantly harder to read. For f-strings, use you can use double quotes on the inside. But avoid f-strings that have too many quotes inside.
|
Always use single quotes for strings (even if the string has `'`) and double quotes for docstrings. Use `'''` only for multi-line strings. An exception can be made if a string has multiple single quotes in it and escaping makes it *significantly* harder to read. For f-strings, use you can use double quotes on the inside. But avoid f-strings that have too many quotes inside.
|
||||||
|
|
||||||
|
|
||||||
### Inline values
|
### Inline values
|
||||||
|
|
||||||
Extracting variables is acceptable for reducing code duplication and improving readability of complex expressions. However, you should avoid extracting variables used only once and moving them to opposite parts of the extractor file, which makes reading the linear flow difficult.
|
Extracting variables is acceptable for reducing code duplication and improving readability of complex expressions. However, you should avoid extracting variables used only once and moving them to opposite parts of the extractor file, which makes reading the linear flow difficult.
|
||||||
|
|
||||||
#### Example
|
#### Examples
|
||||||
|
|
||||||
Correct:
|
Correct:
|
||||||
|
|
||||||
```python
|
```python
|
||||||
title = self._html_search_regex(r'<h1>([^<]+)</h1>', webpage, 'title')
|
return {
|
||||||
|
'title': self._html_search_regex(r'<h1>([^<]+)</h1>', webpage, 'title'),
|
||||||
|
# ...some lines of code...
|
||||||
|
}
|
||||||
```
|
```
|
||||||
|
|
||||||
Incorrect:
|
Incorrect:
|
||||||
@@ -543,6 +644,11 @@ Incorrect:
|
|||||||
TITLE_RE = r'<h1>([^<]+)</h1>'
|
TITLE_RE = r'<h1>([^<]+)</h1>'
|
||||||
# ...some lines of code...
|
# ...some lines of code...
|
||||||
title = self._html_search_regex(TITLE_RE, webpage, 'title')
|
title = self._html_search_regex(TITLE_RE, webpage, 'title')
|
||||||
|
# ...some lines of code...
|
||||||
|
return {
|
||||||
|
'title': title,
|
||||||
|
# ...some lines of code...
|
||||||
|
}
|
||||||
```
|
```
|
||||||
|
|
||||||
|
|
||||||
@@ -574,33 +680,32 @@ Methods supporting list of patterns are: `_search_regex`, `_html_search_regex`,
|
|||||||
|
|
||||||
### Trailing parentheses
|
### Trailing parentheses
|
||||||
|
|
||||||
Always move trailing parentheses used for grouping/functions after the last argument. On the other hand, literal list/tuple/dict/set should closed be in a new line. Generators and list/dict comprehensions may use either style
|
Always move trailing parentheses used for grouping/functions after the last argument. On the other hand, multi-line literal list/tuple/dict/set should closed be in a new line. Generators and list/dict comprehensions may use either style
|
||||||
|
|
||||||
#### Examples
|
#### Examples
|
||||||
|
|
||||||
Correct:
|
Correct:
|
||||||
|
|
||||||
```python
|
```python
|
||||||
url = try_get(
|
url = traverse_obj(info, (
|
||||||
info,
|
'context', 'dispatcher', 'stores', 'VideoTitlePageStore', 'data', 'video', 0, 'VideoUrlSet', 'VideoUrl'), list)
|
||||||
lambda x: x['ResultSet']['Result'][0]['VideoUrlSet']['VideoUrl'],
|
|
||||||
list)
|
|
||||||
```
|
```
|
||||||
Correct:
|
Correct:
|
||||||
|
|
||||||
```python
|
```python
|
||||||
url = try_get(info,
|
url = traverse_obj(
|
||||||
lambda x: x['ResultSet']['Result'][0]['VideoUrlSet']['VideoUrl'],
|
info,
|
||||||
list)
|
('context', 'dispatcher', 'stores', 'VideoTitlePageStore', 'data', 'video', 0, 'VideoUrlSet', 'VideoUrl'),
|
||||||
|
list)
|
||||||
```
|
```
|
||||||
|
|
||||||
Incorrect:
|
Incorrect:
|
||||||
|
|
||||||
```python
|
```python
|
||||||
url = try_get(
|
url = traverse_obj(
|
||||||
info,
|
info,
|
||||||
lambda x: x['ResultSet']['Result'][0]['VideoUrlSet']['VideoUrl'],
|
('context', 'dispatcher', 'stores', 'VideoTitlePageStore', 'data', 'video', 0, 'VideoUrlSet', 'VideoUrl'),
|
||||||
list,
|
list
|
||||||
)
|
)
|
||||||
```
|
```
|
||||||
|
|
||||||
@@ -639,7 +744,7 @@ formats = [
|
|||||||
|
|
||||||
### Use convenience conversion and parsing functions
|
### Use convenience conversion and parsing functions
|
||||||
|
|
||||||
Wrap all extracted numeric data into safe functions from [`yt_dlp/utils.py`](yt_dlp/utils.py): `int_or_none`, `float_or_none`. Use them for string to number conversions as well.
|
Wrap all extracted numeric data into safe functions from [`yt_dlp/utils/`](yt_dlp/utils/): `int_or_none`, `float_or_none`. Use them for string to number conversions as well.
|
||||||
|
|
||||||
Use `url_or_none` for safe URL processing.
|
Use `url_or_none` for safe URL processing.
|
||||||
|
|
||||||
@@ -647,23 +752,19 @@ Use `traverse_obj` and `try_call` (superseeds `dict_get` and `try_get`) for safe
|
|||||||
|
|
||||||
Use `unified_strdate` for uniform `upload_date` or any `YYYYMMDD` meta field extraction, `unified_timestamp` for uniform `timestamp` extraction, `parse_filesize` for `filesize` extraction, `parse_count` for count meta fields extraction, `parse_resolution`, `parse_duration` for `duration` extraction, `parse_age_limit` for `age_limit` extraction.
|
Use `unified_strdate` for uniform `upload_date` or any `YYYYMMDD` meta field extraction, `unified_timestamp` for uniform `timestamp` extraction, `parse_filesize` for `filesize` extraction, `parse_count` for count meta fields extraction, `parse_resolution`, `parse_duration` for `duration` extraction, `parse_age_limit` for `age_limit` extraction.
|
||||||
|
|
||||||
Explore [`yt_dlp/utils.py`](yt_dlp/utils.py) for more useful convenience functions.
|
Explore [`yt_dlp/utils/`](yt_dlp/utils/) for more useful convenience functions.
|
||||||
|
|
||||||
#### More examples
|
#### Examples
|
||||||
|
|
||||||
##### Safely extract optional description from parsed JSON
|
|
||||||
```python
|
```python
|
||||||
description = traverse_obj(response, ('result', 'video', 'summary'), expected_type=str)
|
description = traverse_obj(response, ('result', 'video', 'summary'), expected_type=str)
|
||||||
```
|
thumbnails = traverse_obj(response, ('result', 'thumbnails', ..., 'url'), expected_type=url_or_none)
|
||||||
|
|
||||||
##### Safely extract more optional metadata
|
|
||||||
```python
|
|
||||||
video = traverse_obj(response, ('result', 'video', 0), default={}, expected_type=dict)
|
video = traverse_obj(response, ('result', 'video', 0), default={}, expected_type=dict)
|
||||||
description = video.get('summary')
|
|
||||||
duration = float_or_none(video.get('durationMs'), scale=1000)
|
duration = float_or_none(video.get('durationMs'), scale=1000)
|
||||||
view_count = int_or_none(video.get('views'))
|
view_count = int_or_none(video.get('views'))
|
||||||
```
|
```
|
||||||
# My pull request is labeled pending-fixes

The `pending-fixes` label is added when there are changes requested to a PR. When the necessary changes are made, the label should be removed. However, despite our best efforts, it may sometimes happen that the maintainer did not see the changes or forgot to remove the label. If your PR is still marked as `pending-fixes` a few days after all requested changes have been made, feel free to ping the maintainer who labeled your issue and ask them to re-review and remove the label.
CONTRIBUTORS (401 lines changed)
@@ -2,7 +2,8 @@ pukkandan (owner)
shirt-dev (collaborator)
coletdjnz/colethedj (collaborator)
Ashish0804 (collaborator)
-nao20010128nao/Lesmiscore (collaborator)
+bashonly (collaborator)
+Grub4K (collaborator)
h-h-h-h
pauldubois98
nixxo
@@ -231,3 +232,401 @@ Fam0r
bohwaz
dodrian
vvto33
|
ca-za
|
||||||
|
connercsbn
|
||||||
|
diegorodriguezv
|
||||||
|
ekangmonyet
|
||||||
|
elyse0
|
||||||
|
evansp
|
||||||
|
GiedriusS
|
||||||
|
HE7086
|
||||||
|
JordanWeatherby
|
||||||
|
m4tu4g
|
||||||
|
MarwenDallel
|
||||||
|
nevack
|
||||||
|
putnam
|
||||||
|
rand-net
|
||||||
|
vertan
|
||||||
|
Wikidepia
|
||||||
|
Yipten
|
||||||
|
moench-tegeder
|
||||||
|
christoph-heinrich
|
||||||
|
HobbyistDev
|
||||||
|
LunarFang416
|
||||||
|
sbor23
|
||||||
|
aurelg
|
||||||
|
adamanldo
|
||||||
|
gamer191
|
||||||
|
vkorablin
|
||||||
|
Burve
|
||||||
|
mnn
|
||||||
|
ZhymabekRoman
|
||||||
|
mozbugbox
|
||||||
|
aejdl
|
||||||
|
ping
|
||||||
|
sqrtNOT
|
||||||
|
bubbleguuum
|
||||||
|
darkxex
|
||||||
|
miseran
|
||||||
|
StefanLobbenmeier
|
||||||
|
crazymoose77756
|
||||||
|
nomevi
|
||||||
|
Brett824
|
||||||
|
pingiun
|
||||||
|
dosy4ev
|
||||||
|
EhtishamSabir
|
||||||
|
Ferdi265
|
||||||
|
FirefoxMetzger
|
||||||
|
ftk
|
||||||
|
lamby
|
||||||
|
llamasblade
|
||||||
|
lockmatrix
|
||||||
|
misaelaguayo
|
||||||
|
odo2063
|
||||||
|
pritam20ps05
|
||||||
|
scy
|
||||||
|
sheerluck
|
||||||
|
AxiosDeminence
|
||||||
|
DjesonPV
|
||||||
|
eren-kemer
|
||||||
|
freezboltz
|
||||||
|
Galiley
|
||||||
|
haobinliang
|
||||||
|
Mehavoid
|
||||||
|
winterbird-code
|
||||||
|
yashkc2025
|
||||||
|
aldoridhoni
|
||||||
|
jacobtruman
|
||||||
|
masta79
|
||||||
|
palewire
|
||||||
|
cgrigis
|
||||||
|
DavidH-2022
|
||||||
|
dfaker
|
||||||
|
jackyyf
|
||||||
|
ohaiibuzzle
|
||||||
|
SamantazFox
|
||||||
|
shreyasminocha
|
||||||
|
tejasa97
|
||||||
|
xenov
|
||||||
|
satan1st
|
||||||
|
0xGodspeed
|
||||||
|
5736d79
|
||||||
|
587021c
|
||||||
|
basrieter
|
||||||
|
Bobscorn
|
||||||
|
CNugteren
|
||||||
|
columndeeply
|
||||||
|
DoubleCouponDay
|
||||||
|
Fabi019
|
||||||
|
GautamMKGarg
|
||||||
|
itachi-19
|
||||||
|
jeroenj
|
||||||
|
josanabr
|
||||||
|
LiviaMedeiros
|
||||||
|
nikita-moor
|
||||||
|
snapdgn
|
||||||
|
SuperSonicHub1
|
||||||
|
tannertechnology
|
||||||
|
Timendum
|
||||||
|
tobi1805
|
||||||
|
TokyoBlackHole
|
||||||
|
ajayyy
|
||||||
|
Alienmaster
|
||||||
|
bsun0000
|
||||||
|
changren-wcr
|
||||||
|
ClosedPort22
|
||||||
|
CrankDatSouljaBoy
|
||||||
|
cruel-efficiency
|
||||||
|
endotronic
|
||||||
|
Generator
|
||||||
|
gibson042
|
||||||
|
How-Bout-No
|
||||||
|
invertico
|
||||||
|
jahway603
|
||||||
|
jwoglom
|
||||||
|
lksj
|
||||||
|
megapro17
|
||||||
|
mlampe
|
||||||
|
MrOctopus
|
||||||
|
nosoop
|
||||||
|
puc9
|
||||||
|
sashashura
|
||||||
|
schnusch
|
||||||
|
SG5
|
||||||
|
the-marenga
|
||||||
|
tkgmomosheep
|
||||||
|
vitkhab
|
||||||
|
glensc
|
||||||
|
synthpop123
|
||||||
|
tntmod54321
|
||||||
|
milkknife
|
||||||
|
Bnyro
|
||||||
|
CapacitorSet
|
||||||
|
stelcodes
|
||||||
|
skbeh
|
||||||
|
muddi900
|
||||||
|
digitall
|
||||||
|
chengzhicn
|
||||||
|
mexus
|
||||||
|
JChris246
|
||||||
|
redraskal
|
||||||
|
Spicadox
|
||||||
|
barsnick
|
||||||
|
docbender
|
||||||
|
KurtBestor
|
||||||
|
Chrissi2812
|
||||||
|
FrederikNS
|
||||||
|
gschizas
|
||||||
|
JC-Chung
|
||||||
|
mzhou
|
||||||
|
OndrejBakan
|
||||||
|
ab4cbef
|
||||||
|
aionescu
|
||||||
|
amra
|
||||||
|
ByteDream
|
||||||
|
carusocr
|
||||||
|
chexxor
|
||||||
|
felixonmars
|
||||||
|
FrankZ85
|
||||||
|
FriedrichRehren
|
||||||
|
gregsadetsky
|
||||||
|
LeoniePhiline
|
||||||
|
LowSuggestion912
|
||||||
|
Matumo
|
||||||
|
OIRNOIR
|
||||||
|
OMEGARAZER
|
||||||
|
oxamun
|
||||||
|
pmitchell86
|
||||||
|
qbnu
|
||||||
|
qulaz
|
||||||
|
rebane2001
|
||||||
|
road-master
|
||||||
|
rohieb
|
||||||
|
sdht0
|
||||||
|
seproDev
|
||||||
|
Hill-98
|
||||||
|
LXYan2333
|
||||||
|
mushbite
|
||||||
|
venkata-krishnas
|
||||||
|
7vlad7
|
||||||
|
alexklapheke
|
||||||
|
arobase-che
|
||||||
|
bepvte
|
||||||
|
bergoid
|
||||||
|
blmarket
|
||||||
|
brandon-dacrib
|
||||||
|
c-basalt
|
||||||
|
CoryTibbettsDev
|
||||||
|
Cyberes
|
||||||
|
D0LLYNH0
|
||||||
|
danog
|
||||||
|
DataGhost
|
||||||
|
falbrechtskirchinger
|
||||||
|
foreignBlade
|
||||||
|
garret1317
|
||||||
|
hasezoey
|
||||||
|
hoaluvn
|
||||||
|
ItzMaxTV
|
||||||
|
ivanskodje
|
||||||
|
jo-nike
|
||||||
|
kangalio
|
||||||
|
linsui
|
||||||
|
makew0rld
|
||||||
|
menschel
|
||||||
|
mikf
|
||||||
|
mrscrapy
|
||||||
|
NDagestad
|
||||||
|
Neurognostic
|
||||||
|
NextFire
|
||||||
|
nick-cd
|
||||||
|
permunkle
|
||||||
|
pzhlkj6612
|
||||||
|
ringus1
|
||||||
|
rjy
|
||||||
|
Schmoaaaaah
|
||||||
|
sjthespian
|
||||||
|
theperfectpunk
|
||||||
|
toomyzoom
|
||||||
|
truedread
|
||||||
|
TxI5
|
||||||
|
unbeatable-101
|
||||||
|
vampirefrog
|
||||||
|
vidiot720
|
||||||
|
viktor-enzell
|
||||||
|
zhgwn
|
||||||
|
barthelmannk
|
||||||
|
berkanteber
|
||||||
|
OverlordQ
|
||||||
|
rexlambert22
|
||||||
|
Ti4eeT4e
|
||||||
|
AmanSal1
|
||||||
|
bbilly1
|
||||||
|
meliber
|
||||||
|
nnoboa
|
||||||
|
rdamas
|
||||||
|
RfadnjdExt
|
||||||
|
urectanc
|
||||||
|
nao20010128nao/Lesmiscore
|
||||||
|
04-pasha-04
|
||||||
|
aaruni96
|
||||||
|
aky-01
|
||||||
|
AmirAflak
|
||||||
|
ApoorvShah111
|
||||||
|
at-wat
|
||||||
|
davinkevin
|
||||||
|
demon071
|
||||||
|
denhotte
|
||||||
|
FinnRG
|
||||||
|
fireattack
|
||||||
|
Frankgoji
|
||||||
|
GD-Slime
|
||||||
|
hatsomatt
|
||||||
|
ifan-t
|
||||||
|
kshitiz305
|
||||||
|
kylegustavo
|
||||||
|
mabdelfattah
|
||||||
|
nathantouze
|
||||||
|
niemands
|
||||||
|
Rajeshwaran2001
|
||||||
|
RedDeffender
|
||||||
|
Rohxn16
|
||||||
|
sb0stn
|
||||||
|
SevenLives
|
||||||
|
simon300000
|
||||||
|
snixon
|
||||||
|
soundchaser128
|
||||||
|
szabyg
|
||||||
|
trainman261
|
||||||
|
trislee
|
||||||
|
wader
|
||||||
|
Yalab7
|
||||||
|
zhallgato
|
||||||
|
zhong-yiyu
|
||||||
|
Zprokkel
|
||||||
|
AS6939
|
||||||
|
drzraf
|
||||||
|
handlerug
|
||||||
|
jiru
|
||||||
|
madewokherd
|
||||||
|
xofe
|
||||||
|
awalgarg
|
||||||
|
midnightveil
|
||||||
|
naginatana
|
||||||
|
Riteo
|
||||||
|
1100101
|
||||||
|
aniolpages
|
||||||
|
bartbroere
|
||||||
|
CrendKing
|
||||||
|
Esokrates
|
||||||
|
HitomaruKonpaku
|
||||||
|
LoserFox
|
||||||
|
peci1
|
||||||
|
saintliao
|
||||||
|
shubhexists
|
||||||
|
SirElderling
|
||||||
|
almx
|
||||||
|
elivinsky
|
||||||
|
starius
|
||||||
|
TravisDupes
|
||||||
|
amir16yp
|
||||||
|
Fymyte
|
||||||
|
Ganesh910
|
||||||
|
hashFactory
|
||||||
|
kclauhk
|
||||||
|
Kyraminol
|
||||||
|
lstrojny
|
||||||
|
middlingphys
|
||||||
|
NickCis
|
||||||
|
nicodato
|
||||||
|
prettykool
|
||||||
|
S-Aarab
|
||||||
|
sonmezberkay
|
||||||
|
TSRBerry
|
||||||
|
114514ns
|
||||||
|
agibson-fl
|
||||||
|
alard
|
||||||
|
alien-developers
|
||||||
|
antonkesy
|
||||||
|
ArnauvGilotra
|
||||||
|
Arthurszzz
|
||||||
|
Bibhav48
|
||||||
|
Bl4Cc4t
|
||||||
|
boredzo
|
||||||
|
Caesim404
|
||||||
|
chkuendig
|
||||||
|
chtk
|
||||||
|
Danish-H
|
||||||
|
dasidiot
|
||||||
|
diman8
|
||||||
|
divStar
|
||||||
|
DmitryScaletta
|
||||||
|
feederbox826
|
||||||
|
gmes78
|
||||||
|
gonzalezjo
|
||||||
|
hui1601
|
||||||
|
infanf
|
||||||
|
jazz1611
|
||||||
|
jingtra
|
||||||
|
jkmartindale
|
||||||
|
johnvictorfs
|
||||||
|
llistochek
|
||||||
|
marcdumais
|
||||||
|
martinxyz
|
||||||
|
michal-repo
|
||||||
|
mrmedieval
|
||||||
|
nbr23
|
||||||
|
Nicals
|
||||||
|
Noor-5
|
||||||
|
NurTasin
|
||||||
|
pompos02
|
||||||
|
Pranaxcau
|
||||||
|
pwaldhauer
|
||||||
|
RaduManole
|
||||||
|
RalphORama
|
||||||
|
rrgomes
|
||||||
|
ruiminggu
|
||||||
|
rvsit
|
||||||
|
sefidel
|
||||||
|
shmohawk
|
||||||
|
Snack-X
|
||||||
|
src-tinkerer
|
||||||
|
stilor
|
||||||
|
syntaxsurge
|
||||||
|
t-nil
|
||||||
|
ufukk
|
||||||
|
vista-narvas
|
||||||
|
x11x
|
||||||
|
xpadev-net
|
||||||
|
Xpl0itU
|
||||||
|
YoshichikaAAA
|
||||||
|
zhijinwuu
|
||||||
|
alb
|
||||||
|
hruzgar
|
||||||
|
kasper93
|
||||||
|
leoheitmannruiz
|
||||||
|
luiso1979
|
||||||
|
nipotan
|
||||||
|
Offert4324
|
||||||
|
sta1us
|
||||||
|
Tomoka1
|
||||||
|
trwstin
|
||||||
|
alexhuot1
|
||||||
|
clienthax
|
||||||
|
DaPotato69
|
||||||
|
emqi
|
||||||
|
hugohaa
|
||||||
|
imanoreotwe
|
||||||
|
JakeFinley96
|
||||||
|
lostfictions
|
||||||
|
minamotorin
|
||||||
|
ocococococ
|
||||||
|
Podiumnoche
|
||||||
|
RasmusAntons
|
||||||
|
roeniss
|
||||||
|
shoxie007
|
||||||
|
Szpachlarz
|
||||||
|
The-MAGI
|
||||||
|
TuxCoder
|
||||||
|
voidful
|
||||||
|
vtexier
|
||||||
|
WyohKnott
|
||||||
Changelog.md (2344 lines changed; diff suppressed because it is too large)
@@ -8,6 +8,7 @@ You can also find lists of all [contributors of yt-dlp](CONTRIBUTORS) and [autho
## [pukkandan](https://github.com/pukkandan)

[](https://ko-fi.com/pukkandan)
+[](https://github.com/sponsors/pukkandan)

* Owner of the fork
@@ -25,15 +26,17 @@ You can also find lists of all [contributors of yt-dlp](CONTRIBUTORS) and [autho
## [coletdjnz](https://github.com/coletdjnz)

[](https://github.com/sponsors/coletdjnz)

+* Improved plugin architecture
+* Rewrote the networking infrastructure, implemented support for `requests`
* YouTube improvements including: age-gate bypass, private playlists, multiple-clients (to avoid throttling) and a lot of under-the-hood improvements
-* Added support for downloading YoutubeWebArchive videos
-* Added support for new websites MainStreaming, PRX, nzherald, etc
+* Added support for new websites YoutubeWebArchive, MainStreaming, PRX, nzherald, Mediaklikk, StarTV etc
+* Improved/fixed support for Patreon, panopto, gfycat, itv, pbs, SouthParkDE etc
-## [Ashish0804](https://github.com/Ashish0804)
+## [Ashish0804](https://github.com/Ashish0804) <sub><sup>[Inactive]</sup></sub>

[](https://ko-fi.com/ashish0804)

@@ -42,10 +45,19 @@ You can also find lists of all [contributors of yt-dlp](CONTRIBUTORS) and [autho
* Improved/fixed support for HiDive, HotStar, Hungama, LBRY, LinkedInLearning, Mxplayer, SonyLiv, TV2, Vimeo, VLive etc
-## [Lesmiscore](https://github.com/Lesmiscore) (nao20010128nao)
-
-**Bitcoin**: bc1qfd02r007cutfdjwjmyy9w23rjvtls6ncve7r3s
-**Monacoin**: mona1q3tf7dzvshrhfe3md379xtvt2n22duhglv5dskr
-
-* Download live from start to end for YouTube
-* Added support for new websites mildom, PixivSketch, skeb, radiko, voicy, mirrativ, openrec, whowatch, damtomo, 17.live, mixch etc
+## [bashonly](https://github.com/bashonly)
+
+* `--update-to`, self-updater rewrite, automated/nightly/master releases
+* `--cookies-from-browser` support for Firefox containers, external downloader cookie handling overhaul
+* Added support for new websites like Dacast, Kick, NBCStations, Triller, VideoKen, Weverse, WrestleUniverse etc
+* Improved/fixed support for Anvato, Brightcove, Reddit, SlidesLive, TikTok, Twitter, Vimeo etc
+
+## [Grub4K](https://github.com/Grub4K)
+
+[](https://github.com/sponsors/Grub4K) [](https://ko-fi.com/Grub4K)
+
+* `--update-to`, self-updater rewrite, automated/nightly/master releases
+* Reworked internals like `traverse_obj`, various core refactors and bugs fixes
+* Implemented proper progress reporting for parallel downloads
+* Improved/fixed/added Bundestag, crunchyroll, pr0gramm, Twitter, WrestleUniverse etc
MANIFEST.in (file deleted, 10 lines)
@@ -1,10 +0,0 @@
-include AUTHORS
-include Changelog.md
-include LICENSE
-include README.md
-include completions/*/*
-include supportedsites.md
-include yt-dlp.1
-include requirements.txt
-recursive-include devscripts *
-recursive-include test *
Makefile (121 lines changed)
@@ -2,27 +2,33 @@ all: lazy-extractors yt-dlp doc pypi-files
|
|||||||
clean: clean-test clean-dist
|
clean: clean-test clean-dist
|
||||||
clean-all: clean clean-cache
|
clean-all: clean clean-cache
|
||||||
completions: completion-bash completion-fish completion-zsh
|
completions: completion-bash completion-fish completion-zsh
|
||||||
doc: README.md CONTRIBUTING.md issuetemplates supportedsites
|
doc: README.md CONTRIBUTING.md CONTRIBUTORS issuetemplates supportedsites
|
||||||
ot: offlinetest
|
ot: offlinetest
|
||||||
tar: yt-dlp.tar.gz
|
tar: yt-dlp.tar.gz
|
||||||
|
|
||||||
# Keep this list in sync with MANIFEST.in
|
# Keep this list in sync with pyproject.toml includes/artifacts
|
||||||
# intended use: when building a source distribution,
|
# intended use: when building a source distribution,
|
||||||
# make pypi-files && python setup.py sdist
|
# make pypi-files && python3 -m build -sn .
|
||||||
pypi-files: AUTHORS Changelog.md LICENSE README.md README.txt supportedsites completions yt-dlp.1 devscripts/* test/*
|
pypi-files: AUTHORS Changelog.md LICENSE README.md README.txt supportedsites \
|
||||||
|
completions yt-dlp.1 pyproject.toml setup.cfg devscripts/* test/*
|
||||||
|
|
||||||
.PHONY: all clean install test tar pypi-files completions ot offlinetest codetest supportedsites
|
.PHONY: all clean clean-all clean-test clean-dist clean-cache \
|
||||||
|
completions completion-bash completion-fish completion-zsh \
|
||||||
|
doc issuetemplates supportedsites ot offlinetest codetest test \
|
||||||
|
tar pypi-files lazy-extractors install uninstall
|
||||||
|
|
||||||
clean-test:
|
clean-test:
|
||||||
rm -rf test/testdata/sigs/player-*.js tmp/ *.annotations.xml *.aria2 *.description *.dump *.frag \
|
rm -rf test/testdata/sigs/player-*.js tmp/ *.annotations.xml *.aria2 *.description *.dump *.frag \
|
||||||
*.frag.aria2 *.frag.urls *.info.json *.live_chat.json *.meta *.part* *.tmp *.temp *.unknown_video *.ytdl \
|
*.frag.aria2 *.frag.urls *.info.json *.live_chat.json *.meta *.part* *.tmp *.temp *.unknown_video *.ytdl \
|
||||||
*.3gp *.ape *.ass *.avi *.desktop *.flac *.flv *.jpeg *.jpg *.m4a *.m4v *.mhtml *.mkv *.mov *.mp3 \
|
*.3gp *.ape *.ass *.avi *.desktop *.f4v *.flac *.flv *.gif *.jpeg *.jpg *.lrc *.m4a *.m4v *.mhtml *.mkv *.mov *.mp3 *.mp4 \
|
||||||
*.mp4 *.ogg *.opus *.png *.sbv *.srt *.swf *.swp *.ttml *.url *.vtt *.wav *.webloc *.webm *.webp
|
*.mpg *.mpga *.oga *.ogg *.opus *.png *.sbv *.srt *.ssa *.swf *.swp *.tt *.ttml *.url *.vtt *.wav *.webloc *.webm *.webp
|
||||||
clean-dist:
|
clean-dist:
|
||||||
rm -rf yt-dlp.1.temp.md yt-dlp.1 README.txt MANIFEST build/ dist/ .coverage cover/ yt-dlp.tar.gz completions/ \
|
rm -rf yt-dlp.1.temp.md yt-dlp.1 README.txt MANIFEST build/ dist/ .coverage cover/ yt-dlp.tar.gz completions/ \
|
||||||
yt_dlp/extractor/lazy_extractors.py *.spec CONTRIBUTING.md.tmp yt-dlp yt-dlp.exe yt_dlp.egg-info/ AUTHORS .mailmap
|
yt_dlp/extractor/lazy_extractors.py *.spec CONTRIBUTING.md.tmp yt-dlp yt-dlp.exe yt_dlp.egg-info/ AUTHORS
|
||||||
clean-cache:
|
clean-cache:
|
||||||
find . \( -name "*.pyc" -o -name "*.class" \) -delete
|
find . \( \
|
||||||
|
-type d -name ".*_cache" -o -type d -name __pycache__ -o -name "*.pyc" -o -name "*.class" \
|
||||||
|
\) -prune -exec rm -rf {} \;
|
||||||
|
|
||||||
completion-bash: completions/bash/yt-dlp
|
completion-bash: completions/bash/yt-dlp
|
||||||
completion-fish: completions/fish/yt-dlp.fish
|
completion-fish: completions/fish/yt-dlp.fish
|
||||||
@@ -30,54 +36,75 @@ completion-zsh: completions/zsh/_yt-dlp
|
|||||||
lazy-extractors: yt_dlp/extractor/lazy_extractors.py
|
lazy-extractors: yt_dlp/extractor/lazy_extractors.py
|
||||||
|
|
||||||
PREFIX ?= /usr/local
|
PREFIX ?= /usr/local
|
||||||
DESTDIR ?= .
|
|
||||||
BINDIR ?= $(PREFIX)/bin
|
BINDIR ?= $(PREFIX)/bin
|
||||||
MANDIR ?= $(PREFIX)/man
|
MANDIR ?= $(PREFIX)/man
|
||||||
SHAREDIR ?= $(PREFIX)/share
|
SHAREDIR ?= $(PREFIX)/share
|
||||||
PYTHON ?= /usr/bin/env python3
|
PYTHON ?= /usr/bin/env python3
|
||||||
|
GNUTAR ?= tar
|
||||||
|
|
||||||
# set SYSCONFDIR to /etc if PREFIX=/usr or PREFIX=/usr/local
|
# set markdown input format to "markdown-smart" for pandoc version 2+ and to "markdown" for pandoc prior to version 2
|
||||||
SYSCONFDIR = $(shell if [ $(PREFIX) = /usr -o $(PREFIX) = /usr/local ]; then echo /etc; else echo $(PREFIX)/etc; fi)
|
PANDOC_VERSION_CMD = pandoc -v 2>/dev/null | head -n1 | cut -d' ' -f2 | head -c1
|
||||||
|
PANDOC_VERSION != $(PANDOC_VERSION_CMD)
|
||||||
# set markdown input format to "markdown-smart" for pandoc version 2 and to "markdown" for pandoc prior to version 2
|
PANDOC_VERSION ?= $(shell $(PANDOC_VERSION_CMD))
|
||||||
MARKDOWN = $(shell if [ `pandoc -v | head -n1 | cut -d" " -f2 | head -c1` = "2" ]; then echo markdown-smart; else echo markdown; fi)
|
MARKDOWN_CMD = if [ "$(PANDOC_VERSION)" = "1" -o "$(PANDOC_VERSION)" = "0" ]; then echo markdown; else echo markdown-smart; fi
|
||||||
|
MARKDOWN != $(MARKDOWN_CMD)
|
||||||
|
MARKDOWN ?= $(shell $(MARKDOWN_CMD))
|
||||||
|
|
||||||
install: lazy-extractors yt-dlp yt-dlp.1 completions
|
install: lazy-extractors yt-dlp yt-dlp.1 completions
|
||||||
install -Dm755 yt-dlp $(DESTDIR)$(BINDIR)/yt-dlp
|
mkdir -p $(DESTDIR)$(BINDIR)
|
||||||
install -Dm644 yt-dlp.1 $(DESTDIR)$(MANDIR)/man1/yt-dlp.1
|
install -m755 yt-dlp $(DESTDIR)$(BINDIR)/yt-dlp
|
||||||
install -Dm644 completions/bash/yt-dlp $(DESTDIR)$(SHAREDIR)/bash-completion/completions/yt-dlp
|
mkdir -p $(DESTDIR)$(MANDIR)/man1
|
||||||
install -Dm644 completions/zsh/_yt-dlp $(DESTDIR)$(SHAREDIR)/zsh/site-functions/_yt-dlp
|
install -m644 yt-dlp.1 $(DESTDIR)$(MANDIR)/man1/yt-dlp.1
|
||||||
install -Dm644 completions/fish/yt-dlp.fish $(DESTDIR)$(SHAREDIR)/fish/vendor_completions.d/yt-dlp.fish
|
mkdir -p $(DESTDIR)$(SHAREDIR)/bash-completion/completions
|
||||||
|
install -m644 completions/bash/yt-dlp $(DESTDIR)$(SHAREDIR)/bash-completion/completions/yt-dlp
|
||||||
|
mkdir -p $(DESTDIR)$(SHAREDIR)/zsh/site-functions
|
||||||
|
install -m644 completions/zsh/_yt-dlp $(DESTDIR)$(SHAREDIR)/zsh/site-functions/_yt-dlp
|
||||||
|
mkdir -p $(DESTDIR)$(SHAREDIR)/fish/vendor_completions.d
|
||||||
|
install -m644 completions/fish/yt-dlp.fish $(DESTDIR)$(SHAREDIR)/fish/vendor_completions.d/yt-dlp.fish
|
||||||
|
|
||||||
|
uninstall:
|
||||||
|
rm -f $(DESTDIR)$(BINDIR)/yt-dlp
|
||||||
|
rm -f $(DESTDIR)$(MANDIR)/man1/yt-dlp.1
|
||||||
|
rm -f $(DESTDIR)$(SHAREDIR)/bash-completion/completions/yt-dlp
|
||||||
|
rm -f $(DESTDIR)$(SHAREDIR)/zsh/site-functions/_yt-dlp
|
||||||
|
rm -f $(DESTDIR)$(SHAREDIR)/fish/vendor_completions.d/yt-dlp.fish
|
||||||
|
|
||||||
codetest:
|
codetest:
|
||||||
flake8 .
|
ruff check .
|
||||||
|
autopep8 --diff .
|
||||||
|
|
||||||
test:
|
test:
|
||||||
$(PYTHON) -m pytest
|
$(PYTHON) -m pytest -Werror
|
||||||
$(MAKE) codetest
|
$(MAKE) codetest
|
||||||
|
|
||||||
offlinetest: codetest
|
offlinetest: codetest
|
||||||
$(PYTHON) -m pytest -k "not download"
|
$(PYTHON) -m pytest -Werror -m "not download"
|
||||||
|
|
||||||
yt-dlp: yt_dlp/*.py yt_dlp/*/*.py
|
CODE_FOLDERS_CMD = find yt_dlp -type f -name '__init__.py' | sed 's,/__init__.py,,' | grep -v '/__' | sort
|
||||||
|
CODE_FOLDERS != $(CODE_FOLDERS_CMD)
|
||||||
|
CODE_FOLDERS ?= $(shell $(CODE_FOLDERS_CMD))
|
||||||
|
CODE_FILES_CMD = for f in $(CODE_FOLDERS) ; do echo "$$f" | sed 's,$$,/*.py,' ; done
|
||||||
|
CODE_FILES != $(CODE_FILES_CMD)
|
||||||
|
CODE_FILES ?= $(shell $(CODE_FILES_CMD))
|
||||||
|
yt-dlp: $(CODE_FILES)
|
||||||
mkdir -p zip
|
mkdir -p zip
|
||||||
for d in yt_dlp yt_dlp/downloader yt_dlp/extractor yt_dlp/postprocessor ; do \
|
for d in $(CODE_FOLDERS) ; do \
|
||||||
mkdir -p zip/$$d ;\
|
mkdir -p zip/$$d ;\
|
||||||
cp -pPR $$d/*.py zip/$$d/ ;\
|
cp -pPR $$d/*.py zip/$$d/ ;\
|
||||||
done
|
done
|
||||||
touch -t 200001010101 zip/yt_dlp/*.py zip/yt_dlp/*/*.py
|
(cd zip && touch -t 200001010101 $(CODE_FILES))
|
||||||
mv zip/yt_dlp/__main__.py zip/
|
mv zip/yt_dlp/__main__.py zip/
|
||||||
cd zip ; zip -q ../yt-dlp yt_dlp/*.py yt_dlp/*/*.py __main__.py
|
(cd zip && zip -q ../yt-dlp $(CODE_FILES) __main__.py)
|
||||||
rm -rf zip
|
rm -rf zip
|
||||||
echo '#!$(PYTHON)' > yt-dlp
|
echo '#!$(PYTHON)' > yt-dlp
|
||||||
cat yt-dlp.zip >> yt-dlp
|
cat yt-dlp.zip >> yt-dlp
|
||||||
rm yt-dlp.zip
|
rm yt-dlp.zip
|
||||||
chmod a+x yt-dlp
|
chmod a+x yt-dlp
|
||||||
|
|
||||||
README.md: yt_dlp/*.py yt_dlp/*/*.py
|
README.md: $(CODE_FILES) devscripts/make_readme.py
|
||||||
COLUMNS=80 $(PYTHON) yt_dlp/__main__.py --help | $(PYTHON) devscripts/make_readme.py
|
COLUMNS=80 $(PYTHON) yt_dlp/__main__.py --ignore-config --help | $(PYTHON) devscripts/make_readme.py
|
||||||
|
|
||||||
CONTRIBUTING.md: README.md
|
CONTRIBUTING.md: README.md devscripts/make_contributing.py
|
||||||
$(PYTHON) devscripts/make_contributing.py README.md CONTRIBUTING.md
|
$(PYTHON) devscripts/make_contributing.py README.md CONTRIBUTING.md
|
||||||
|
|
||||||
issuetemplates: devscripts/make_issue_template.py .github/ISSUE_TEMPLATE_tmpl/1_broken_site.yml .github/ISSUE_TEMPLATE_tmpl/2_site_support_request.yml .github/ISSUE_TEMPLATE_tmpl/3_site_feature_request.yml .github/ISSUE_TEMPLATE_tmpl/4_bug_report.yml .github/ISSUE_TEMPLATE_tmpl/5_feature_request.yml yt_dlp/version.py
|
issuetemplates: devscripts/make_issue_template.py .github/ISSUE_TEMPLATE_tmpl/1_broken_site.yml .github/ISSUE_TEMPLATE_tmpl/2_site_support_request.yml .github/ISSUE_TEMPLATE_tmpl/3_site_feature_request.yml .github/ISSUE_TEMPLATE_tmpl/4_bug_report.yml .github/ISSUE_TEMPLATE_tmpl/5_feature_request.yml yt_dlp/version.py
|
||||||
@@ -94,45 +121,53 @@ supportedsites:
|
|||||||
README.txt: README.md
|
README.txt: README.md
|
||||||
pandoc -f $(MARKDOWN) -t plain README.md -o README.txt
|
pandoc -f $(MARKDOWN) -t plain README.md -o README.txt
|
||||||
|
|
||||||
yt-dlp.1: README.md
|
yt-dlp.1: README.md devscripts/prepare_manpage.py
|
||||||
$(PYTHON) devscripts/prepare_manpage.py yt-dlp.1.temp.md
|
$(PYTHON) devscripts/prepare_manpage.py yt-dlp.1.temp.md
|
||||||
pandoc -s -f $(MARKDOWN) -t man yt-dlp.1.temp.md -o yt-dlp.1
|
pandoc -s -f $(MARKDOWN) -t man yt-dlp.1.temp.md -o yt-dlp.1
|
||||||
rm -f yt-dlp.1.temp.md
|
rm -f yt-dlp.1.temp.md
|
||||||
|
|
||||||
completions/bash/yt-dlp: yt_dlp/*.py yt_dlp/*/*.py devscripts/bash-completion.in
|
completions/bash/yt-dlp: $(CODE_FILES) devscripts/bash-completion.in
|
||||||
mkdir -p completions/bash
|
mkdir -p completions/bash
|
||||||
$(PYTHON) devscripts/bash-completion.py
|
$(PYTHON) devscripts/bash-completion.py
|
||||||
|
|
||||||
completions/zsh/_yt-dlp: yt_dlp/*.py yt_dlp/*/*.py devscripts/zsh-completion.in
|
completions/zsh/_yt-dlp: $(CODE_FILES) devscripts/zsh-completion.in
|
||||||
mkdir -p completions/zsh
|
mkdir -p completions/zsh
|
||||||
$(PYTHON) devscripts/zsh-completion.py
|
$(PYTHON) devscripts/zsh-completion.py
|
||||||
|
|
||||||
completions/fish/yt-dlp.fish: yt_dlp/*.py yt_dlp/*/*.py devscripts/fish-completion.in
|
completions/fish/yt-dlp.fish: $(CODE_FILES) devscripts/fish-completion.in
|
||||||
mkdir -p completions/fish
|
mkdir -p completions/fish
|
||||||
$(PYTHON) devscripts/fish-completion.py
|
$(PYTHON) devscripts/fish-completion.py
|
||||||
|
|
||||||
_EXTRACTOR_FILES = $(shell find yt_dlp/extractor -iname '*.py' -and -not -iname 'lazy_extractors.py')
|
_EXTRACTOR_FILES_CMD = find yt_dlp/extractor -name '*.py' -and -not -name 'lazy_extractors.py'
|
||||||
|
_EXTRACTOR_FILES != $(_EXTRACTOR_FILES_CMD)
|
||||||
|
_EXTRACTOR_FILES ?= $(shell $(_EXTRACTOR_FILES_CMD))
|
||||||
yt_dlp/extractor/lazy_extractors.py: devscripts/make_lazy_extractors.py devscripts/lazy_load_template.py $(_EXTRACTOR_FILES)
|
yt_dlp/extractor/lazy_extractors.py: devscripts/make_lazy_extractors.py devscripts/lazy_load_template.py $(_EXTRACTOR_FILES)
|
||||||
$(PYTHON) devscripts/make_lazy_extractors.py $@
|
$(PYTHON) devscripts/make_lazy_extractors.py $@
|
||||||
|
|
||||||
yt-dlp.tar.gz: all
|
yt-dlp.tar.gz: all
|
||||||
@tar -czf $(DESTDIR)/yt-dlp.tar.gz --transform "s|^|yt-dlp/|" --owner 0 --group 0 \
|
@$(GNUTAR) -czf yt-dlp.tar.gz --transform "s|^|yt-dlp/|" --owner 0 --group 0 \
|
||||||
--exclude '*.DS_Store' \
|
--exclude '*.DS_Store' \
|
||||||
--exclude '*.kate-swp' \
|
--exclude '*.kate-swp' \
|
||||||
--exclude '*.pyc' \
|
--exclude '*.pyc' \
|
||||||
--exclude '*.pyo' \
|
--exclude '*.pyo' \
|
||||||
--exclude '*~' \
|
--exclude '*~' \
|
||||||
--exclude '__pycache__' \
|
--exclude '__pycache__' \
|
||||||
|
--exclude '.*_cache' \
|
||||||
--exclude '.git' \
|
--exclude '.git' \
|
||||||
-- \
|
-- \
|
||||||
README.md supportedsites.md Changelog.md LICENSE \
|
README.md supportedsites.md Changelog.md LICENSE \
|
||||||
CONTRIBUTING.md Collaborators.md CONTRIBUTORS AUTHORS \
|
CONTRIBUTING.md Collaborators.md CONTRIBUTORS AUTHORS \
|
||||||
Makefile MANIFEST.in yt-dlp.1 README.txt completions \
|
Makefile yt-dlp.1 README.txt completions .gitignore \
|
||||||
setup.py setup.cfg yt-dlp yt_dlp requirements.txt \
|
setup.cfg yt-dlp yt_dlp pyproject.toml devscripts test
|
||||||
devscripts test tox.ini pytest.ini
|
|
||||||
|
|
||||||
AUTHORS: .mailmap
|
AUTHORS: Changelog.md
|
||||||
git shortlog -s -n | cut -f2 | sort > AUTHORS
|
@if [ -d '.git' ] && command -v git > /dev/null ; then \
|
||||||
|
echo 'Generating $@ from git commit history' ; \
|
||||||
|
git shortlog -s -n HEAD | cut -f2 | sort > $@ ; \
|
||||||
|
fi
|
||||||
|
|
||||||
.mailmap:
|
CONTRIBUTORS: Changelog.md
|
||||||
git shortlog -s -e -n | awk '!(out[$$NF]++) { $$1="";sub(/^[ \t]+/,""); print}' > .mailmap
|
@if [ -d '.git' ] && command -v git > /dev/null ; then \
|
||||||
|
echo 'Updating $@ from git commit history' ; \
|
||||||
|
$(PYTHON) devscripts/make_changelog.py -v -c > /dev/null ; \
|
||||||
|
fi
|
||||||
|
|||||||
0
bundle/__init__.py
Normal file
0
bundle/__init__.py
Normal file
10
bundle/docker/compose.yml
Normal file
10
bundle/docker/compose.yml
Normal file
@@ -0,0 +1,10 @@
|
|||||||
|
services:
|
||||||
|
static:
|
||||||
|
build: static
|
||||||
|
environment:
|
||||||
|
channel: ${channel}
|
||||||
|
origin: ${origin}
|
||||||
|
version: ${version}
|
||||||
|
volumes:
|
||||||
|
- ~/build:/build
|
||||||
|
- ../..:/yt-dlp
|
||||||
bundle/docker/static/Dockerfile (new file, 21 lines)
@@ -0,0 +1,21 @@
|
|||||||
|
FROM alpine:3.19 as base
|
||||||
|
|
||||||
|
RUN apk --update add --no-cache \
|
||||||
|
build-base \
|
||||||
|
python3 \
|
||||||
|
pipx \
|
||||||
|
;
|
||||||
|
|
||||||
|
RUN pipx install pyinstaller
|
||||||
|
# Requires above step to prepare the shared venv
|
||||||
|
RUN ~/.local/share/pipx/shared/bin/python -m pip install -U wheel
|
||||||
|
RUN apk --update add --no-cache \
|
||||||
|
scons \
|
||||||
|
patchelf \
|
||||||
|
binutils \
|
||||||
|
;
|
||||||
|
RUN pipx install staticx
|
||||||
|
|
||||||
|
WORKDIR /yt-dlp
|
||||||
|
COPY entrypoint.sh /entrypoint.sh
|
||||||
|
ENTRYPOINT /entrypoint.sh
|
||||||
bundle/docker/static/entrypoint.sh (new executable file, 13 lines)
@@ -0,0 +1,13 @@
|
|||||||
|
#!/bin/ash
|
||||||
|
set -e
|
||||||
|
|
||||||
|
source ~/.local/share/pipx/venvs/pyinstaller/bin/activate
|
||||||
|
python -m devscripts.install_deps --include secretstorage
|
||||||
|
python -m devscripts.make_lazy_extractors
|
||||||
|
python devscripts/update-version.py -c "${channel}" -r "${origin}" "${version}"
|
||||||
|
python -m bundle.pyinstaller
|
||||||
|
deactivate
|
||||||
|
|
||||||
|
source ~/.local/share/pipx/venvs/staticx/bin/activate
|
||||||
|
staticx /yt-dlp/dist/yt-dlp_linux /build/yt-dlp_linux
|
||||||
|
deactivate
|
||||||
bundle/py2exe.py (new executable file, 59 lines)
@@ -0,0 +1,59 @@
|
|||||||
|
#!/usr/bin/env python3
|
||||||
|
|
||||||
|
# Allow execution from anywhere
|
||||||
|
import os
|
||||||
|
import sys
|
||||||
|
|
||||||
|
sys.path.insert(0, os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
|
||||||
|
|
||||||
|
import warnings
|
||||||
|
|
||||||
|
from py2exe import freeze
|
||||||
|
|
||||||
|
from devscripts.utils import read_version
|
||||||
|
|
||||||
|
VERSION = read_version()
|
||||||
|
|
||||||
|
|
||||||
|
def main():
|
||||||
|
warnings.warn(
|
||||||
|
'py2exe builds do not support pycryptodomex and needs VC++14 to run. '
|
||||||
|
'It is recommended to run "pyinst.py" to build using pyinstaller instead')
|
||||||
|
|
||||||
|
freeze(
|
||||||
|
console=[{
|
||||||
|
'script': './yt_dlp/__main__.py',
|
||||||
|
'dest_base': 'yt-dlp',
|
||||||
|
'icon_resources': [(1, 'devscripts/logo.ico')],
|
||||||
|
}],
|
||||||
|
version_info={
|
||||||
|
'version': VERSION,
|
||||||
|
'description': 'A feature-rich command-line audio/video downloader',
|
||||||
|
'comments': 'Official repository: <https://github.com/yt-dlp/yt-dlp>',
|
||||||
|
'product_name': 'yt-dlp',
|
||||||
|
'product_version': VERSION,
|
||||||
|
},
|
||||||
|
options={
|
||||||
|
'bundle_files': 0,
|
||||||
|
'compressed': 1,
|
||||||
|
'optimize': 2,
|
||||||
|
'dist_dir': './dist',
|
||||||
|
'excludes': [
|
||||||
|
# py2exe cannot import Crypto
|
||||||
|
'Crypto',
|
||||||
|
'Cryptodome',
|
||||||
|
# py2exe appears to confuse this with our socks library.
|
||||||
|
# We don't use pysocks and urllib3.contrib.socks would fail to import if tried.
|
||||||
|
'urllib3.contrib.socks'
|
||||||
|
],
|
||||||
|
'dll_excludes': ['w9xpopen.exe', 'crypt32.dll'],
|
||||||
|
# Modules that are only imported dynamically must be added here
|
||||||
|
'includes': ['yt_dlp.compat._legacy', 'yt_dlp.compat._deprecated',
|
||||||
|
'yt_dlp.utils._legacy', 'yt_dlp.utils._deprecated'],
|
||||||
|
},
|
||||||
|
zipfile=None,
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
if __name__ == '__main__':
|
||||||
|
main()
|
||||||
pyinst.py → bundle/pyinstaller.py (113 lines changed; normal file → executable file)
@@ -1,34 +1,31 @@
|
|||||||
#!/usr/bin/env python3
|
#!/usr/bin/env python3
|
||||||
# coding: utf-8
|
|
||||||
|
# Allow direct execution
|
||||||
import os
|
import os
|
||||||
import platform
|
|
||||||
import sys
|
import sys
|
||||||
from PyInstaller.utils.hooks import collect_submodules
|
|
||||||
|
|
||||||
|
sys.path.insert(0, os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
|
||||||
|
|
||||||
OS_NAME = platform.system()
|
import platform
|
||||||
if OS_NAME == 'Windows':
|
|
||||||
from PyInstaller.utils.win32.versioninfo import (
|
|
||||||
VarStruct, VarFileInfo, StringStruct, StringTable,
|
|
||||||
StringFileInfo, FixedFileInfo, VSVersionInfo, SetVersion,
|
|
||||||
)
|
|
||||||
elif OS_NAME == 'Darwin':
|
|
||||||
pass
|
|
||||||
else:
|
|
||||||
raise Exception('{OS_NAME} is not supported')
|
|
||||||
|
|
||||||
ARCH = platform.architecture()[0][:2]
|
from PyInstaller.__main__ import run as run_pyinstaller
|
||||||
|
|
||||||
|
from devscripts.utils import read_version
|
||||||
|
|
||||||
|
OS_NAME, MACHINE, ARCH = sys.platform, platform.machine().lower(), platform.architecture()[0][:2]
|
||||||
|
if MACHINE in ('x86', 'x86_64', 'amd64', 'i386', 'i686'):
|
||||||
|
MACHINE = 'x86' if ARCH == '32' else ''
|
||||||
|
|
||||||
|
|
||||||
def main():
|
def main():
|
||||||
opts = parse_options()
|
opts, version = parse_options(), read_version()
|
||||||
version = read_version()
|
|
||||||
|
|
||||||
suffix = '_macos' if OS_NAME == 'Darwin' else '_x86' if ARCH == '32' else ''
|
onedir = '--onedir' in opts or '-D' in opts
|
||||||
final_file = 'dist/%syt-dlp%s%s' % (
|
if not onedir and '-F' not in opts and '--onefile' not in opts:
|
||||||
'yt-dlp/' if '--onedir' in opts else '', suffix, '.exe' if OS_NAME == 'Windows' else '')
|
opts.append('--onefile')
|
||||||
|
|
||||||
print(f'Building yt-dlp v{version} {ARCH}bit for {OS_NAME} with options {opts}')
|
name, final_file = exe(onedir)
|
||||||
|
print(f'Building yt-dlp v{version} for {OS_NAME} {platform.machine()} with options {opts}')
|
||||||
print('Remember to update the version using "devscripts/update-version.py"')
|
print('Remember to update the version using "devscripts/update-version.py"')
|
||||||
if not os.path.isfile('yt_dlp/extractor/lazy_extractors.py'):
|
if not os.path.isfile('yt_dlp/extractor/lazy_extractors.py'):
|
||||||
print('WARNING: Building without lazy_extractors. Run '
|
print('WARNING: Building without lazy_extractors. Run '
|
||||||
@@ -36,36 +33,43 @@ def main():
|
|||||||
print(f'Destination: {final_file}\n')
|
print(f'Destination: {final_file}\n')
|
||||||
|
|
||||||
opts = [
|
opts = [
|
||||||
f'--name=yt-dlp{suffix}',
|
f'--name={name}',
|
||||||
'--icon=devscripts/logo.ico',
|
'--icon=devscripts/logo.ico',
|
||||||
'--upx-exclude=vcruntime140.dll',
|
'--upx-exclude=vcruntime140.dll',
|
||||||
'--noconfirm',
|
'--noconfirm',
|
||||||
*dependency_options(),
|
'--additional-hooks-dir=yt_dlp/__pyinstaller',
|
||||||
*opts,
|
*opts,
|
||||||
'yt_dlp/__main__.py',
|
'yt_dlp/__main__.py',
|
||||||
]
|
]
|
||||||
|
|
||||||
print(f'Running PyInstaller with {opts}')
|
print(f'Running PyInstaller with {opts}')
|
||||||
|
run_pyinstaller(opts)
|
||||||
import PyInstaller.__main__
|
|
||||||
|
|
||||||
PyInstaller.__main__.run(opts)
|
|
||||||
|
|
||||||
set_version_info(final_file, version)
|
set_version_info(final_file, version)
|
||||||
|
|
||||||
|
|
||||||
def parse_options():
|
def parse_options():
|
||||||
# Compatability with older arguments
|
# Compatibility with older arguments
|
||||||
opts = sys.argv[1:]
|
opts = sys.argv[1:]
|
||||||
if opts[0:1] in (['32'], ['64']):
|
if opts[0:1] in (['32'], ['64']):
|
||||||
if ARCH != opts[0]:
|
if ARCH != opts[0]:
|
||||||
raise Exception(f'{opts[0]}bit executable cannot be built on a {ARCH}bit system')
|
raise Exception(f'{opts[0]}bit executable cannot be built on a {ARCH}bit system')
|
||||||
opts = opts[1:]
|
opts = opts[1:]
|
||||||
return opts or ['--onefile']
|
return opts
|
||||||
|
|
||||||
|
|
||||||
def read_version():
|
def exe(onedir):
|
||||||
exec(compile(open('yt_dlp/version.py').read(), 'yt_dlp/version.py', 'exec'))
|
"""@returns (name, path)"""
|
||||||
return locals()['__version__']
|
name = '_'.join(filter(None, (
|
||||||
|
'yt-dlp',
|
||||||
|
{'win32': '', 'darwin': 'macos'}.get(OS_NAME, OS_NAME),
|
||||||
|
MACHINE,
|
||||||
|
)))
|
||||||
|
return name, ''.join(filter(None, (
|
||||||
|
'dist/',
|
||||||
|
onedir and f'{name}/',
|
||||||
|
name,
|
||||||
|
OS_NAME == 'win32' and '.exe'
|
||||||
|
)))
|
||||||
|
|
||||||
|
|
||||||
def version_to_list(version):
|
def version_to_list(version):
|
||||||
@@ -73,36 +77,29 @@ def version_to_list(version):
|
|||||||
return list(map(int, version_list)) + [0] * (4 - len(version_list))
|
return list(map(int, version_list)) + [0] * (4 - len(version_list))
|
||||||
|
|
||||||
|
|
||||||
def dependency_options():
|
|
||||||
dependencies = [pycryptodome_module(), 'mutagen', 'brotli', 'certifi'] + collect_submodules('websockets')
|
|
||||||
excluded_modules = ['test', 'ytdlp_plugins', 'youtube-dl', 'youtube-dlc']
|
|
||||||
|
|
||||||
yield from (f'--hidden-import={module}' for module in dependencies)
|
|
||||||
yield from (f'--exclude-module={module}' for module in excluded_modules)
|
|
||||||
|
|
||||||
|
|
||||||
def pycryptodome_module():
|
|
||||||
try:
|
|
||||||
import Cryptodome # noqa: F401
|
|
||||||
except ImportError:
|
|
||||||
try:
|
|
||||||
import Crypto # noqa: F401
|
|
||||||
print('WARNING: Using Crypto since Cryptodome is not available. '
|
|
||||||
'Install with: pip install pycryptodomex', file=sys.stderr)
|
|
||||||
return 'Crypto'
|
|
||||||
except ImportError:
|
|
||||||
pass
|
|
||||||
return 'Cryptodome'
|
|
||||||
|
|
||||||
|
|
||||||
def set_version_info(exe, version):
|
def set_version_info(exe, version):
|
||||||
if OS_NAME == 'Windows':
|
if OS_NAME == 'win32':
|
||||||
windows_set_version(exe, version)
|
windows_set_version(exe, version)
|
||||||
|
|
||||||
|
|
||||||
def windows_set_version(exe, version):
|
def windows_set_version(exe, version):
|
||||||
|
from PyInstaller.utils.win32.versioninfo import (
|
||||||
|
FixedFileInfo,
|
||||||
|
StringFileInfo,
|
||||||
|
StringStruct,
|
||||||
|
StringTable,
|
||||||
|
VarFileInfo,
|
||||||
|
VarStruct,
|
||||||
|
VSVersionInfo,
|
||||||
|
)
|
||||||
|
|
||||||
|
try:
|
||||||
|
from PyInstaller.utils.win32.versioninfo import SetVersion
|
||||||
|
except ImportError: # Pyinstaller >= 5.8
|
||||||
|
from PyInstaller.utils.win32.versioninfo import write_version_info_to_executable as SetVersion
|
||||||
|
|
||||||
version_list = version_to_list(version)
|
version_list = version_to_list(version)
|
||||||
suffix = '_x86' if ARCH == '32' else ''
|
suffix = MACHINE and f'_{MACHINE}'
|
||||||
SetVersion(exe, VSVersionInfo(
|
SetVersion(exe, VSVersionInfo(
|
||||||
ffi=FixedFileInfo(
|
ffi=FixedFileInfo(
|
||||||
filevers=version_list,
|
filevers=version_list,
|
||||||
@@ -116,9 +113,9 @@ def windows_set_version(exe, version):
|
|||||||
),
|
),
|
||||||
kids=[
|
kids=[
|
||||||
StringFileInfo([StringTable('040904B0', [
|
StringFileInfo([StringTable('040904B0', [
|
||||||
StringStruct('Comments', 'yt-dlp%s Command Line Interface.' % suffix),
|
StringStruct('Comments', 'yt-dlp%s Command Line Interface' % suffix),
|
||||||
StringStruct('CompanyName', 'https://github.com/yt-dlp'),
|
StringStruct('CompanyName', 'https://github.com/yt-dlp'),
|
||||||
StringStruct('FileDescription', 'yt-dlp%s' % (' (32 Bit)' if ARCH == '32' else '')),
|
StringStruct('FileDescription', 'yt-dlp%s' % (MACHINE and f' ({MACHINE})')),
|
||||||
StringStruct('FileVersion', version),
|
StringStruct('FileVersion', version),
|
||||||
StringStruct('InternalName', f'yt-dlp{suffix}'),
|
StringStruct('InternalName', f'yt-dlp{suffix}'),
|
||||||
StringStruct('LegalCopyright', 'pukkandan.ytdlp@gmail.com | UNLICENSE'),
|
StringStruct('LegalCopyright', 'pukkandan.ytdlp@gmail.com | UNLICENSE'),
|
||||||
Binary file not shown.
devscripts/__init__.py (new file, 0 lines)
@@ -1,11 +1,12 @@
|
|||||||
#!/usr/bin/env python3
|
#!/usr/bin/env python3
|
||||||
from __future__ import unicode_literals
|
|
||||||
|
|
||||||
|
# Allow direct execution
|
||||||
import os
|
import os
|
||||||
from os.path import dirname as dirn
|
|
||||||
import sys
|
import sys
|
||||||
|
|
||||||
sys.path.insert(0, dirn(dirn((os.path.abspath(__file__)))))
|
sys.path.insert(0, os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
|
||||||
|
|
||||||
|
|
||||||
import yt_dlp
|
import yt_dlp
|
||||||
|
|
||||||
BASH_COMPLETION_FILE = "completions/bash/yt-dlp"
|
BASH_COMPLETION_FILE = "completions/bash/yt-dlp"
|
||||||
@@ -26,5 +27,5 @@ def build_completion(opt_parser):
|
|||||||
f.write(filled_template)
|
f.write(filled_template)
|
||||||
|
|
||||||
|
|
||||||
parser = yt_dlp.parseOpts()[0]
|
parser = yt_dlp.parseOpts(ignore_config_files=True)[0]
|
||||||
build_completion(parser)
|
build_completion(parser)
|
||||||
|
|||||||
@@ -1,435 +0,0 @@
|
|||||||
# UNUSED
|
|
||||||
|
|
||||||
#!/usr/bin/python3
|
|
||||||
|
|
||||||
import argparse
|
|
||||||
import ctypes
|
|
||||||
import functools
|
|
||||||
import shutil
|
|
||||||
import subprocess
|
|
||||||
import sys
|
|
||||||
import tempfile
|
|
||||||
import threading
|
|
||||||
import traceback
|
|
||||||
import os.path
|
|
||||||
|
|
||||||
sys.path.insert(0, os.path.dirname(os.path.dirname((os.path.abspath(__file__)))))
|
|
||||||
from yt_dlp.compat import (
|
|
||||||
compat_input,
|
|
||||||
compat_http_server,
|
|
||||||
compat_str,
|
|
||||||
compat_urlparse,
|
|
||||||
)
|
|
||||||
|
|
||||||
# These are not used outside of buildserver.py thus not in compat.py
|
|
||||||
|
|
||||||
try:
|
|
||||||
import winreg as compat_winreg
|
|
||||||
except ImportError: # Python 2
|
|
||||||
import _winreg as compat_winreg
|
|
||||||
|
|
||||||
try:
|
|
||||||
import socketserver as compat_socketserver
|
|
||||||
except ImportError: # Python 2
|
|
||||||
import SocketServer as compat_socketserver
|
|
||||||
|
|
||||||
|
|
||||||
class BuildHTTPServer(compat_socketserver.ThreadingMixIn, compat_http_server.HTTPServer):
|
|
||||||
allow_reuse_address = True
|
|
||||||
|
|
||||||
|
|
||||||
advapi32 = ctypes.windll.advapi32
|
|
||||||
|
|
||||||
SC_MANAGER_ALL_ACCESS = 0xf003f
|
|
||||||
SC_MANAGER_CREATE_SERVICE = 0x02
|
|
||||||
SERVICE_WIN32_OWN_PROCESS = 0x10
|
|
||||||
SERVICE_AUTO_START = 0x2
|
|
||||||
SERVICE_ERROR_NORMAL = 0x1
|
|
||||||
DELETE = 0x00010000
|
|
||||||
SERVICE_STATUS_START_PENDING = 0x00000002
|
|
||||||
SERVICE_STATUS_RUNNING = 0x00000004
|
|
||||||
SERVICE_ACCEPT_STOP = 0x1
|
|
||||||
|
|
||||||
SVCNAME = 'youtubedl_builder'
|
|
||||||
|
|
||||||
LPTSTR = ctypes.c_wchar_p
|
|
||||||
START_CALLBACK = ctypes.WINFUNCTYPE(None, ctypes.c_int, ctypes.POINTER(LPTSTR))
|
|
||||||
|
|
||||||
|
|
||||||
class SERVICE_TABLE_ENTRY(ctypes.Structure):
|
|
||||||
_fields_ = [
|
|
||||||
('lpServiceName', LPTSTR),
|
|
||||||
('lpServiceProc', START_CALLBACK)
|
|
||||||
]
|
|
||||||
|
|
||||||
|
|
||||||
HandlerEx = ctypes.WINFUNCTYPE(
|
|
||||||
ctypes.c_int, # return
|
|
||||||
ctypes.c_int, # dwControl
|
|
||||||
ctypes.c_int, # dwEventType
|
|
||||||
ctypes.c_void_p, # lpEventData,
|
|
||||||
ctypes.c_void_p, # lpContext,
|
|
||||||
)
|
|
||||||
|
|
||||||
|
|
||||||
def _ctypes_array(c_type, py_array):
|
|
||||||
ar = (c_type * len(py_array))()
|
|
||||||
ar[:] = py_array
|
|
||||||
return ar
|
|
||||||
|
|
||||||
|
|
||||||
def win_OpenSCManager():
|
|
||||||
res = advapi32.OpenSCManagerW(None, None, SC_MANAGER_ALL_ACCESS)
|
|
||||||
if not res:
|
|
||||||
raise Exception('Opening service manager failed - '
|
|
||||||
'are you running this as administrator?')
|
|
||||||
return res
|
|
||||||
|
|
||||||
|
|
||||||
def win_install_service(service_name, cmdline):
|
|
||||||
manager = win_OpenSCManager()
|
|
||||||
try:
|
|
||||||
h = advapi32.CreateServiceW(
|
|
||||||
manager, service_name, None,
|
|
||||||
SC_MANAGER_CREATE_SERVICE, SERVICE_WIN32_OWN_PROCESS,
|
|
||||||
SERVICE_AUTO_START, SERVICE_ERROR_NORMAL,
|
|
||||||
cmdline, None, None, None, None, None)
|
|
||||||
if not h:
|
|
||||||
raise OSError('Service creation failed: %s' % ctypes.FormatError())
|
|
||||||
|
|
||||||
advapi32.CloseServiceHandle(h)
|
|
||||||
finally:
|
|
||||||
advapi32.CloseServiceHandle(manager)
|
|
||||||
|
|
||||||
|
|
||||||
def win_uninstall_service(service_name):
|
|
||||||
manager = win_OpenSCManager()
|
|
||||||
try:
|
|
||||||
h = advapi32.OpenServiceW(manager, service_name, DELETE)
|
|
||||||
if not h:
|
|
||||||
raise OSError('Could not find service %s: %s' % (
|
|
||||||
service_name, ctypes.FormatError()))
|
|
||||||
|
|
||||||
try:
|
|
||||||
if not advapi32.DeleteService(h):
|
|
||||||
raise OSError('Deletion failed: %s' % ctypes.FormatError())
|
|
||||||
finally:
|
|
||||||
advapi32.CloseServiceHandle(h)
|
|
||||||
finally:
|
|
||||||
advapi32.CloseServiceHandle(manager)
|
|
||||||
|
|
||||||
|
|
||||||
def win_service_report_event(service_name, msg, is_error=True):
|
|
||||||
with open('C:/sshkeys/log', 'a', encoding='utf-8') as f:
|
|
||||||
f.write(msg + '\n')
|
|
||||||
|
|
||||||
event_log = advapi32.RegisterEventSourceW(None, service_name)
|
|
||||||
if not event_log:
|
|
||||||
raise OSError('Could not report event: %s' % ctypes.FormatError())
|
|
||||||
|
|
||||||
try:
|
|
||||||
type_id = 0x0001 if is_error else 0x0004
|
|
||||||
event_id = 0xc0000000 if is_error else 0x40000000
|
|
||||||
lines = _ctypes_array(LPTSTR, [msg])
|
|
||||||
|
|
||||||
if not advapi32.ReportEventW(
|
|
||||||
event_log, type_id, 0, event_id, None, len(lines), 0,
|
|
||||||
lines, None):
|
|
||||||
raise OSError('Event reporting failed: %s' % ctypes.FormatError())
|
|
||||||
finally:
|
|
||||||
advapi32.DeregisterEventSource(event_log)
|
|
||||||
|
|
||||||
|
|
||||||
def win_service_handler(stop_event, *args):
|
|
||||||
try:
|
|
||||||
raise ValueError('Handler called with args ' + repr(args))
|
|
||||||
TODO
|
|
||||||
except Exception as e:
|
|
||||||
tb = traceback.format_exc()
|
|
||||||
msg = str(e) + '\n' + tb
|
|
||||||
win_service_report_event(service_name, msg, is_error=True)
|
|
||||||
raise
|
|
||||||
|
|
||||||
|
|
||||||
def win_service_set_status(handle, status_code):
|
|
||||||
svcStatus = SERVICE_STATUS()
|
|
||||||
svcStatus.dwServiceType = SERVICE_WIN32_OWN_PROCESS
|
|
||||||
svcStatus.dwCurrentState = status_code
|
|
||||||
svcStatus.dwControlsAccepted = SERVICE_ACCEPT_STOP
|
|
||||||
|
|
||||||
svcStatus.dwServiceSpecificExitCode = 0
|
|
||||||
|
|
||||||
if not advapi32.SetServiceStatus(handle, ctypes.byref(svcStatus)):
|
|
||||||
raise OSError('SetServiceStatus failed: %r' % ctypes.FormatError())
|
|
||||||
|
|
||||||
|
|
||||||
def win_service_main(service_name, real_main, argc, argv_raw):
|
|
||||||
try:
|
|
||||||
# args = [argv_raw[i].value for i in range(argc)]
|
|
||||||
stop_event = threading.Event()
|
|
||||||
handler = HandlerEx(functools.partial(stop_event, win_service_handler))
|
|
||||||
h = advapi32.RegisterServiceCtrlHandlerExW(service_name, handler, None)
|
|
||||||
if not h:
|
|
||||||
raise OSError('Handler registration failed: %s' %
|
|
||||||
ctypes.FormatError())
|
|
||||||
|
|
||||||
TODO
|
|
||||||
except Exception as e:
|
|
||||||
tb = traceback.format_exc()
|
|
||||||
msg = str(e) + '\n' + tb
|
|
||||||
win_service_report_event(service_name, msg, is_error=True)
|
|
||||||
raise
|
|
||||||
|
|
||||||
|
|
||||||
def win_service_start(service_name, real_main):
|
|
||||||
try:
|
|
||||||
cb = START_CALLBACK(
|
|
||||||
functools.partial(win_service_main, service_name, real_main))
|
|
||||||
dispatch_table = _ctypes_array(SERVICE_TABLE_ENTRY, [
|
|
||||||
SERVICE_TABLE_ENTRY(
|
|
||||||
service_name,
|
|
||||||
cb
|
|
||||||
),
|
|
||||||
SERVICE_TABLE_ENTRY(None, ctypes.cast(None, START_CALLBACK))
|
|
||||||
])
|
|
||||||
|
|
||||||
if not advapi32.StartServiceCtrlDispatcherW(dispatch_table):
|
|
||||||
raise OSError('ctypes start failed: %s' % ctypes.FormatError())
|
|
||||||
except Exception as e:
|
|
||||||
tb = traceback.format_exc()
|
|
||||||
msg = str(e) + '\n' + tb
|
|
||||||
win_service_report_event(service_name, msg, is_error=True)
|
|
||||||
raise
|
|
||||||
|
|
||||||
|
|
||||||
def main(args=None):
|
|
||||||
parser = argparse.ArgumentParser()
|
|
||||||
parser.add_argument('-i', '--install',
|
|
||||||
action='store_const', dest='action', const='install',
|
|
||||||
help='Launch at Windows startup')
|
|
||||||
parser.add_argument('-u', '--uninstall',
|
|
||||||
action='store_const', dest='action', const='uninstall',
|
|
||||||
help='Remove Windows service')
|
|
||||||
parser.add_argument('-s', '--service',
|
|
||||||
action='store_const', dest='action', const='service',
|
|
||||||
help='Run as a Windows service')
|
|
||||||
parser.add_argument('-b', '--bind', metavar='<host:port>',
|
|
||||||
action='store', default='0.0.0.0:8142',
|
|
||||||
help='Bind to host:port (default %default)')
|
|
||||||
options = parser.parse_args(args=args)
|
|
||||||
|
|
||||||
if options.action == 'install':
|
|
||||||
fn = os.path.abspath(__file__).replace('v:', '\\\\vboxsrv\\vbox')
|
|
||||||
cmdline = '%s %s -s -b %s' % (sys.executable, fn, options.bind)
|
|
||||||
win_install_service(SVCNAME, cmdline)
|
|
||||||
return
|
|
||||||
|
|
||||||
if options.action == 'uninstall':
|
|
||||||
win_uninstall_service(SVCNAME)
|
|
||||||
return
|
|
||||||
|
|
||||||
if options.action == 'service':
|
|
||||||
win_service_start(SVCNAME, main)
|
|
||||||
return
|
|
||||||
|
|
||||||
host, port_str = options.bind.split(':')
|
|
||||||
port = int(port_str)
|
|
||||||
|
|
||||||
print('Listening on %s:%d' % (host, port))
|
|
||||||
srv = BuildHTTPServer((host, port), BuildHTTPRequestHandler)
|
|
||||||
thr = threading.Thread(target=srv.serve_forever)
|
|
||||||
thr.start()
|
|
||||||
compat_input('Press ENTER to shut down')
|
|
||||||
srv.shutdown()
|
|
||||||
thr.join()
|
|
||||||
|
|
||||||
|
|
||||||
def rmtree(path):
|
|
||||||
for name in os.listdir(path):
|
|
||||||
fname = os.path.join(path, name)
|
|
||||||
if os.path.isdir(fname):
|
|
||||||
rmtree(fname)
|
|
||||||
else:
|
|
||||||
os.chmod(fname, 0o666)
|
|
||||||
os.remove(fname)
|
|
||||||
os.rmdir(path)
|
|
||||||
|
|
||||||
|
|
||||||
class BuildError(Exception):
|
|
||||||
def __init__(self, output, code=500):
|
|
||||||
self.output = output
|
|
||||||
self.code = code
|
|
||||||
|
|
||||||
def __str__(self):
|
|
||||||
return self.output
|
|
||||||
|
|
||||||
|
|
||||||
class HTTPError(BuildError):
|
|
||||||
pass
|
|
||||||
|
|
||||||
|
|
||||||
class PythonBuilder(object):
|
|
||||||
def __init__(self, **kwargs):
|
|
||||||
python_version = kwargs.pop('python', '3.4')
|
|
||||||
python_path = None
|
|
||||||
for node in ('Wow6432Node\\', ''):
|
|
||||||
try:
|
|
||||||
key = compat_winreg.OpenKey(
|
|
||||||
compat_winreg.HKEY_LOCAL_MACHINE,
|
|
||||||
r'SOFTWARE\%sPython\PythonCore\%s\InstallPath' % (node, python_version))
|
|
||||||
try:
|
|
||||||
python_path, _ = compat_winreg.QueryValueEx(key, '')
|
|
||||||
finally:
|
|
||||||
compat_winreg.CloseKey(key)
|
|
||||||
break
|
|
||||||
except Exception:
|
|
||||||
pass
|
|
||||||
|
|
||||||
if not python_path:
|
|
||||||
raise BuildError('No such Python version: %s' % python_version)
|
|
||||||
|
|
||||||
self.pythonPath = python_path
|
|
||||||
|
|
||||||
super(PythonBuilder, self).__init__(**kwargs)
|
|
||||||
|
|
||||||
|
|
||||||
class GITInfoBuilder(object):
|
|
||||||
def __init__(self, **kwargs):
|
|
||||||
try:
|
|
||||||
self.user, self.repoName = kwargs['path'][:2]
|
|
||||||
self.rev = kwargs.pop('rev')
|
|
||||||
except ValueError:
|
|
||||||
raise BuildError('Invalid path')
|
|
||||||
except KeyError as e:
|
|
||||||
raise BuildError('Missing mandatory parameter "%s"' % e.args[0])
|
|
||||||
|
|
||||||
path = os.path.join(os.environ['APPDATA'], 'Build archive', self.repoName, self.user)
|
|
||||||
if not os.path.exists(path):
|
|
||||||
os.makedirs(path)
|
|
||||||
self.basePath = tempfile.mkdtemp(dir=path)
|
|
||||||
self.buildPath = os.path.join(self.basePath, 'build')
|
|
||||||
|
|
||||||
super(GITInfoBuilder, self).__init__(**kwargs)
|
|
||||||
|
|
||||||
|
|
||||||
class GITBuilder(GITInfoBuilder):
|
|
||||||
def build(self):
|
|
||||||
try:
|
|
||||||
subprocess.check_output(['git', 'clone', 'git://github.com/%s/%s.git' % (self.user, self.repoName), self.buildPath])
|
|
||||||
subprocess.check_output(['git', 'checkout', self.rev], cwd=self.buildPath)
|
|
||||||
except subprocess.CalledProcessError as e:
|
|
||||||
raise BuildError(e.output)
|
|
||||||
|
|
||||||
super(GITBuilder, self).build()
|
|
||||||
|
|
||||||
|
|
||||||
class YoutubeDLBuilder(object):
|
|
||||||
authorizedUsers = ['fraca7', 'phihag', 'rg3', 'FiloSottile', 'ytdl-org']
|
|
||||||
|
|
||||||
def __init__(self, **kwargs):
|
|
||||||
if self.repoName != 'yt-dlp':
|
|
||||||
raise BuildError('Invalid repository "%s"' % self.repoName)
|
|
||||||
if self.user not in self.authorizedUsers:
|
|
||||||
raise HTTPError('Unauthorized user "%s"' % self.user, 401)
|
|
||||||
|
|
||||||
super(YoutubeDLBuilder, self).__init__(**kwargs)
|
|
||||||
|
|
||||||
def build(self):
|
|
||||||
try:
|
|
||||||
proc = subprocess.Popen([os.path.join(self.pythonPath, 'python.exe'), 'setup.py', 'py2exe'], stdin=subprocess.PIPE, cwd=self.buildPath)
|
|
||||||
proc.wait()
|
|
||||||
#subprocess.check_output([os.path.join(self.pythonPath, 'python.exe'), 'setup.py', 'py2exe'],
|
|
||||||
# cwd=self.buildPath)
|
|
||||||
except subprocess.CalledProcessError as e:
|
|
||||||
raise BuildError(e.output)
|
|
||||||
|
|
||||||
super(YoutubeDLBuilder, self).build()
|
|
||||||
|
|
||||||
|
|
||||||
class DownloadBuilder(object):
|
|
||||||
def __init__(self, **kwargs):
|
|
||||||
self.handler = kwargs.pop('handler')
|
|
||||||
self.srcPath = os.path.join(self.buildPath, *tuple(kwargs['path'][2:]))
|
|
||||||
self.srcPath = os.path.abspath(os.path.normpath(self.srcPath))
|
|
||||||
if not self.srcPath.startswith(self.buildPath):
|
|
||||||
raise HTTPError(self.srcPath, 401)
|
|
||||||
|
|
||||||
super(DownloadBuilder, self).__init__(**kwargs)
|
|
||||||
|
|
||||||
def build(self):
|
|
||||||
if not os.path.exists(self.srcPath):
|
|
||||||
raise HTTPError('No such file', 404)
|
|
||||||
if os.path.isdir(self.srcPath):
|
|
||||||
raise HTTPError('Is a directory: %s' % self.srcPath, 401)
|
|
||||||
|
|
||||||
self.handler.send_response(200)
|
|
||||||
self.handler.send_header('Content-Type', 'application/octet-stream')
|
|
||||||
self.handler.send_header('Content-Disposition', 'attachment; filename=%s' % os.path.split(self.srcPath)[-1])
|
|
||||||
self.handler.send_header('Content-Length', str(os.stat(self.srcPath).st_size))
|
|
||||||
self.handler.end_headers()
|
|
||||||
|
|
||||||
with open(self.srcPath, 'rb') as src:
|
|
||||||
shutil.copyfileobj(src, self.handler.wfile)
|
|
||||||
|
|
||||||
super(DownloadBuilder, self).build()
|
|
||||||
|
|
||||||
|
|
||||||
class CleanupTempDir(object):
|
|
||||||
def build(self):
|
|
||||||
try:
|
|
||||||
rmtree(self.basePath)
|
|
||||||
except Exception as e:
|
|
||||||
print('WARNING deleting "%s": %s' % (self.basePath, e))
|
|
||||||
|
|
||||||
super(CleanupTempDir, self).build()
|
|
||||||
|
|
||||||
|
|
||||||
class Null(object):
|
|
||||||
def __init__(self, **kwargs):
|
|
||||||
pass
|
|
||||||
|
|
||||||
def start(self):
|
|
||||||
pass
|
|
||||||
|
|
||||||
def close(self):
|
|
||||||
pass
|
|
||||||
|
|
||||||
def build(self):
|
|
||||||
pass
|
|
||||||
|
|
||||||
|
|
||||||
class Builder(PythonBuilder, GITBuilder, YoutubeDLBuilder, DownloadBuilder, CleanupTempDir, Null):
|
|
||||||
pass
|
|
||||||
|
|
||||||
|
|
||||||
class BuildHTTPRequestHandler(compat_http_server.BaseHTTPRequestHandler):
|
|
||||||
actionDict = {'build': Builder, 'download': Builder} # They're the same, no more caching.
|
|
||||||
|
|
||||||
def do_GET(self):
|
|
||||||
path = compat_urlparse.urlparse(self.path)
|
|
||||||
paramDict = dict([(key, value[0]) for key, value in compat_urlparse.parse_qs(path.query).items()])
|
|
||||||
action, _, path = path.path.strip('/').partition('/')
|
|
||||||
if path:
|
|
||||||
path = path.split('/')
|
|
||||||
if action in self.actionDict:
|
|
||||||
try:
|
|
||||||
builder = self.actionDict[action](path=path, handler=self, **paramDict)
|
|
||||||
builder.start()
|
|
||||||
try:
|
|
||||||
builder.build()
|
|
||||||
finally:
|
|
||||||
builder.close()
|
|
||||||
except BuildError as e:
|
|
||||||
self.send_response(e.code)
|
|
||||||
msg = compat_str(e).encode('UTF-8')
|
|
||||||
self.send_header('Content-Type', 'text/plain; charset=UTF-8')
|
|
||||||
self.send_header('Content-Length', len(msg))
|
|
||||||
self.end_headers()
|
|
||||||
self.wfile.write(msg)
|
|
||||||
else:
|
|
||||||
self.send_response(500, 'Unknown build method "%s"' % action)
|
|
||||||
else:
|
|
||||||
self.send_response(500, 'Malformed URL')
|
|
||||||
|
|
||||||
if __name__ == '__main__':
|
|
||||||
main()
|
|
||||||
devscripts/changelog_override.json (new file, 173 lines)
@@ -0,0 +1,173 @@
[
    {
        "action": "add",
        "when": "29cb20bd563c02671b31dd840139e93dd37150a1",
        "short": "[priority] **A new release type has been added!**\n * [`nightly`](https://github.com/yt-dlp/yt-dlp/releases/tag/nightly) builds will be made after each push, containing the latest fixes (but also possibly bugs).\n * When using `--update`/`-U`, a release binary will only update to its current channel (either `stable` or `nightly`).\n * The `--update-to` option has been added allowing the user more control over program upgrades (or downgrades).\n * `--update-to` can change the release channel (`stable`, `nightly`) and also upgrade or downgrade to specific tags.\n * **Usage**: `--update-to CHANNEL`, `--update-to TAG`, `--update-to CHANNEL@TAG`"
    },
    {
        "action": "add",
        "when": "5038f6d713303e0967d002216e7a88652401c22a",
        "short": "[priority] **YouTube throttling fixes!**"
    },
    {
        "action": "remove",
        "when": "2e023649ea4e11151545a34dc1360c114981a236"
    },
    {
        "action": "add",
        "when": "01aba2519a0884ef17d5f85608dbd2a455577147",
        "short": "[priority] YouTube: Improved throttling and signature fixes"
    },
    {
        "action": "change",
        "when": "c86e433c35fe5da6cb29f3539eef97497f84ed38",
        "short": "[extractor/niconico:series] Fix extraction (#6898)",
        "authors": ["sqrtNOT"]
    },
    {
        "action": "change",
        "when": "69a40e4a7f6caa5662527ebd2f3c4e8aa02857a2",
        "short": "[extractor/youtube:music_search_url] Extract title (#7102)",
        "authors": ["kangalio"]
    },
    {
        "action": "change",
        "when": "8417f26b8a819cd7ffcd4e000ca3e45033e670fb",
        "short": "Add option `--color` (#6904)",
        "authors": ["Grub4K"]
    },
    {
        "action": "change",
        "when": "b4e0d75848e9447cee2cd3646ce54d4744a7ff56",
        "short": "Improve `--download-sections`\n - Support negative time-ranges\n - Add `*from-url` to obey time-ranges in URL",
        "authors": ["pukkandan"]
    },
    {
        "action": "change",
        "when": "1e75d97db21152acc764b30a688e516f04b8a142",
        "short": "[extractor/youtube] Add `ios` to default clients used\n - IOS is affected neither by 403 nor by nsig so helps mitigate them preemptively\n - IOS also has higher bit-rate 'premium' formats though they are not labeled as such",
        "authors": ["pukkandan"]
    },
    {
        "action": "change",
        "when": "f2ff0f6f1914b82d4a51681a72cc0828115dcb4a",
        "short": "[extractor/motherless] Add gallery support, fix groups (#7211)",
        "authors": ["rexlambert22", "Ti4eeT4e"]
    },
    {
        "action": "change",
        "when": "a4486bfc1dc7057efca9dd3fe70d7fa25c56f700",
        "short": "[misc] Revert \"Add automatic duplicate issue detection\"",
        "authors": ["pukkandan"]
    },
    {
        "action": "add",
        "when": "1ceb657bdd254ad961489e5060f2ccc7d556b729",
        "short": "[priority] Security: [[CVE-2023-35934](https://cve.mitre.org/cgi-bin/cvename.cgi?name=CVE-2023-35934)] Fix [Cookie leak](https://github.com/yt-dlp/yt-dlp/security/advisories/GHSA-v8mc-9377-rwjj)\n - `--add-header Cookie:` is deprecated and auto-scoped to input URL domains\n - Cookies are scoped when passed to external downloaders\n - Add `cookies` field to info.json and deprecate `http_headers.Cookie`"
    },
    {
        "action": "change",
        "when": "b03fa7834579a01cc5fba48c0e73488a16683d48",
        "short": "[ie/twitter] Revert 92315c03774cfabb3a921884326beb4b981f786b",
        "authors": ["pukkandan"]
    },
    {
        "action": "change",
        "when": "fcd6a76adc49d5cd8783985c7ce35384b72e545f",
        "short": "[test] Add tests for socks proxies (#7908)",
        "authors": ["coletdjnz"]
    },
    {
        "action": "change",
        "when": "4bf912282a34b58b6b35d8f7e6be535770c89c76",
        "short": "[rh:urllib] Remove dot segments during URL normalization (#7662)",
        "authors": ["coletdjnz"]
    },
    {
        "action": "change",
        "when": "59e92b1f1833440bb2190f847eb735cf0f90bc85",
        "short": "[rh:urllib] Simplify gzip decoding (#7611)",
        "authors": ["Grub4K"]
    },
    {
        "action": "add",
        "when": "c1d71d0d9f41db5e4306c86af232f5f6220a130b",
        "short": "[priority] **The minimum *recommended* Python version has been raised to 3.8**\nSince Python 3.7 has reached end-of-life, support for it will be dropped soon. [Read more](https://github.com/yt-dlp/yt-dlp/issues/7803)"
    },
    {
        "action": "add",
        "when": "61bdf15fc7400601c3da1aa7a43917310a5bf391",
        "short": "[priority] Security: [[CVE-2023-40581](https://cve.mitre.org/cgi-bin/cvename.cgi?name=CVE-2023-40581)] [Prevent RCE when using `--exec` with `%q` on Windows](https://github.com/yt-dlp/yt-dlp/security/advisories/GHSA-42h4-v29r-42qg)\n - The shell escape function is now using `\"\"` instead of `\\\"`.\n - `utils.Popen` has been patched to properly quote commands."
    },
    {
        "action": "change",
        "when": "8a8b54523addf46dfd50ef599761a81bc22362e6",
        "short": "[rh:requests] Add handler for `requests` HTTP library (#3668)\n\n\tAdds support for HTTPS proxies and persistent connections (keep-alive)",
        "authors": ["bashonly", "coletdjnz", "Grub4K"]
    },
    {
        "action": "add",
        "when": "1d03633c5a1621b9f3a756f0a4f9dc61fab3aeaa",
        "short": "[priority] **The release channels have been adjusted!**\n\t* [`master`](https://github.com/yt-dlp/yt-dlp-master-builds) builds are made after each push, containing the latest fixes (but also possibly bugs). This was previously the `nightly` channel.\n\t* [`nightly`](https://github.com/yt-dlp/yt-dlp-nightly-builds) builds are now made once a day, if there were any changes."
    },
    {
        "action": "add",
        "when": "f04b5bedad7b281bee9814686bba1762bae092eb",
        "short": "[priority] Security: [[CVE-2023-46121](https://cve.mitre.org/cgi-bin/cvename.cgi?name=CVE-2023-46121)] Patch [Generic Extractor MITM Vulnerability via Arbitrary Proxy Injection](https://github.com/yt-dlp/yt-dlp/security/advisories/GHSA-3ch3-jhc6-5r8x)\n\t- Disallow smuggling of arbitrary `http_headers`; extractors now only use specific headers"
    },
    {
        "action": "change",
        "when": "15f22b4880b6b3f71f350c64d70976ae65b9f1ca",
        "short": "[webvtt] Allow spaces before newlines for CueBlock (#7681)",
        "authors": ["TSRBerry"]
    },
    {
        "action": "change",
        "when": "4ce57d3b873c2887814cbec03d029533e82f7db5",
        "short": "[ie] Support multi-period MPD streams (#6654)",
        "authors": ["alard", "pukkandan"]
    },
    {
        "action": "change",
        "when": "aa7e9ae4f48276bd5d0173966c77db9484f65a0a",
        "short": "[ie/xvideos] Support new URL format (#9502)",
        "authors": ["sta1us"]
    },
    {
        "action": "remove",
        "when": "22e4dfacb61f62dfbb3eb41b31c7b69ba1059b80"
    },
    {
        "action": "change",
        "when": "e3a3ed8a981d9395c4859b6ef56cd02bc3148db2",
        "short": "[cleanup:ie] No `from` stdlib imports in extractors",
        "authors": ["pukkandan"]
    },
    {
        "action": "add",
        "when": "9590cc6b4768e190183d7d071a6c78170889116a",
        "short": "[priority] Security: [[CVE-2024-22423](https://cve.mitre.org/cgi-bin/cvename.cgi?name=CVE-2024-22423)] [Prevent RCE when using `--exec` with `%q` on Windows](https://github.com/yt-dlp/yt-dlp/security/advisories/GHSA-hjq6-52gw-2g7p)\n - The shell escape function now properly escapes `%`, `\\` and `\\n`.\n - `utils.Popen` has been patched accordingly."
    },
    {
        "action": "change",
        "when": "41ba4a808b597a3afed78c89675a30deb6844450",
        "short": "[ie/tiktok] Extract via mobile API only if extractor-arg is passed (#9938)",
        "authors": ["bashonly"]
    },
    {
        "action": "remove",
        "when": "6e36d17f404556f0e3a43f441c477a71a91877d9"
    },
    {
        "action": "change",
        "when": "beaf832c7a9d57833f365ce18f6115b88071b296",
        "short": "[ie/soundcloud] Add `formats` extractor-arg (#10004)",
        "authors": ["bashonly", "Grub4K"]
    },
    {
        "action": "change",
        "when": "5c019f6328ad40d66561eac3c4de0b3cd070d0f6",
        "short": "[cleanup] Misc (#9765)",
        "authors": ["bashonly", "Grub4K", "seproDev"]
    }
]
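These override entries are consumed by the changelog generator. As a rough sketch of the semantics only (a simplified stand-in, not the devscripts implementation), applying such a list to a mapping of commits keyed by hash could look like this:

# Simplified sketch of how add/remove/change overrides behave; not the actual devscripts code.
def apply_overrides(commits, overrides):
    for override in overrides:
        key = override.get('hash') or override.get('when')
        if override['action'] == 'add':
            commits[key or override['short']] = override
        elif override['action'] == 'remove':
            commits.pop(key, None)
        elif override['action'] == 'change' and key in commits:
            commits[key] = {**commits[key], **override}
    return commits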
devscripts/changelog_override.schema.json (new file, 96 lines)
@@ -0,0 +1,96 @@
{
    "$schema": "http://json-schema.org/draft/2020-12/schema",
    "type": "array",
    "uniqueItems": true,
    "items": {
        "type": "object",
        "oneOf": [
            {
                "type": "object",
                "properties": {
                    "action": {
                        "enum": [
                            "add"
                        ]
                    },
                    "when": {
                        "type": "string",
                        "pattern": "^([0-9a-f]{40}|\\d{4}\\.\\d{2}\\.\\d{2})$"
                    },
                    "hash": {
                        "type": "string",
                        "pattern": "^[0-9a-f]{40}$"
                    },
                    "short": {
                        "type": "string"
                    },
                    "authors": {
                        "type": "array",
                        "items": {
                            "type": "string"
                        }
                    }
                },
                "required": [
                    "action",
                    "short"
                ]
            },
            {
                "type": "object",
                "properties": {
                    "action": {
                        "enum": [
                            "remove"
                        ]
                    },
                    "when": {
                        "type": "string",
                        "pattern": "^([0-9a-f]{40}|\\d{4}\\.\\d{2}\\.\\d{2})$"
                    },
                    "hash": {
                        "type": "string",
                        "pattern": "^[0-9a-f]{40}$"
                    }
                },
                "required": [
                    "action",
                    "hash"
                ]
            },
            {
                "type": "object",
                "properties": {
                    "action": {
                        "enum": [
                            "change"
                        ]
                    },
                    "when": {
                        "type": "string",
                        "pattern": "^([0-9a-f]{40}|\\d{4}\\.\\d{2}\\.\\d{2})$"
                    },
                    "hash": {
                        "type": "string",
                        "pattern": "^[0-9a-f]{40}$"
                    },
                    "short": {
                        "type": "string"
                    },
                    "authors": {
                        "type": "array",
                        "items": {
                            "type": "string"
                        }
                    }
                },
                "required": [
                    "action",
                    "hash",
                    "short",
                    "authors"
                ]
            }
        ]
    }
}
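A quick way to check the override file against this schema is the third-party jsonschema package; this is only a convenience sketch, and jsonschema is not a devscripts dependency.

# Convenience sketch; requires `pip install jsonschema` (not a devscripts dependency).
import json

from jsonschema import validate

with open('devscripts/changelog_override.json') as f:
    overrides = json.load(f)
with open('devscripts/changelog_override.schema.json') as f:
    schema = json.load(f)

validate(instance=overrides, schema=schema)  # raises ValidationError on a bad entry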
@@ -1,6 +1,4 @@
 #!/usr/bin/env python3
-from __future__ import unicode_literals
-
 """
 This script employs a VERY basic heuristic ('porn' in webpage.lower()) to check
 if we are not 'age_limit' tagging some porn site
@@ -12,11 +10,14 @@ pass the list filename as the only argument
 # Allow direct execution
 import os
 import sys
+
 sys.path.insert(0, os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
 
+
+import urllib.parse
+import urllib.request
+
 from test.helper import gettestcases
-from yt_dlp.utils import compat_urllib_parse_urlparse
-from yt_dlp.utils import compat_urllib_request
 
 if len(sys.argv) > 1:
     METHOD = 'LIST'
@@ -27,9 +28,9 @@ else:
 for test in gettestcases():
     if METHOD == 'EURISTIC':
         try:
-            webpage = compat_urllib_request.urlopen(test['url'], timeout=10).read()
+            webpage = urllib.request.urlopen(test['url'], timeout=10).read()
         except Exception:
-            print('\nFail: {0}'.format(test['name']))
+            print('\nFail: {}'.format(test['name']))
             continue
 
         webpage = webpage.decode('utf8', 'replace')
@@ -37,9 +38,9 @@ for test in gettestcases():
         RESULT = 'porn' in webpage.lower()
 
     elif METHOD == 'LIST':
-        domain = compat_urllib_parse_urlparse(test['url']).netloc
+        domain = urllib.parse.urlparse(test['url']).netloc
         if not domain:
-            print('\nFail: {0}'.format(test['name']))
+            print('\nFail: {}'.format(test['name']))
             continue
         domain = '.'.join(domain.split('.')[-2:])
 
@@ -47,11 +48,11 @@ for test in gettestcases():
 
     if RESULT and ('info_dict' not in test or 'age_limit' not in test['info_dict']
                    or test['info_dict']['age_limit'] != 18):
-        print('\nPotential missing age_limit check: {0}'.format(test['name']))
+        print('\nPotential missing age_limit check: {}'.format(test['name']))
 
     elif not RESULT and ('info_dict' in test and 'age_limit' in test['info_dict']
                          and test['info_dict']['age_limit'] == 18):
-        print('\nPotential false negative: {0}'.format(test['name']))
+        print('\nPotential false negative: {}'.format(test['name']))
 
     else:
         sys.stdout.write('.')
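The change above only swaps the removed compat helpers for their stdlib equivalents. A minimal illustration (the URL is an arbitrary example, not taken from the script):

# Stdlib equivalents of the removed compat helpers; example URL only.
import urllib.parse
import urllib.request

domain = urllib.parse.urlparse('https://example.com/watch?v=x').netloc  # was compat_urllib_parse_urlparse
webpage = urllib.request.urlopen('https://example.com', timeout=10).read()  # was compat_urllib_request.urlopen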
devscripts/cli_to_api.py (new file, 48 lines)
@@ -0,0 +1,48 @@
# Allow direct execution
import os
import sys

sys.path.insert(0, os.path.dirname(os.path.dirname(os.path.abspath(__file__))))

import yt_dlp
import yt_dlp.options

create_parser = yt_dlp.options.create_parser


def parse_patched_options(opts):
    patched_parser = create_parser()
    patched_parser.defaults.update({
        'ignoreerrors': False,
        'retries': 0,
        'fragment_retries': 0,
        'extract_flat': False,
        'concat_playlist': 'never',
    })
    yt_dlp.options.create_parser = lambda: patched_parser
    try:
        return yt_dlp.parse_options(opts)
    finally:
        yt_dlp.options.create_parser = create_parser


default_opts = parse_patched_options([]).ydl_opts


def cli_to_api(opts, cli_defaults=False):
    opts = (yt_dlp.parse_options if cli_defaults else parse_patched_options)(opts).ydl_opts

    diff = {k: v for k, v in opts.items() if default_opts[k] != v}
    if 'postprocessors' in diff:
        diff['postprocessors'] = [pp for pp in diff['postprocessors']
                                  if pp not in default_opts['postprocessors']]
    return diff


if __name__ == '__main__':
    from pprint import pprint

    print('\nThe arguments passed translate to:\n')
    pprint(cli_to_api(sys.argv[1:]))
    print('\nCombining these with the CLI defaults gives:\n')
    pprint(cli_to_api(sys.argv[1:], True))
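To illustrate how this helper is meant to be used: the flags below are arbitrary examples, and the import assumes the yt-dlp repository root is the working directory / on sys.path.

# Example usage; `-x --audio-format mp3` are arbitrary flags.
# Equivalent to running: python devscripts/cli_to_api.py -x --audio-format mp3
from devscripts.cli_to_api import cli_to_api

ydl_opts = cli_to_api(['-x', '--audio-format', 'mp3'])
print(ydl_opts)  # only the YoutubeDL options that differ from the library defaults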
@@ -1,112 +0,0 @@
|
|||||||
# Unused
|
|
||||||
|
|
||||||
#!/usr/bin/env python3
|
|
||||||
from __future__ import unicode_literals
|
|
||||||
|
|
||||||
import io
|
|
||||||
import json
|
|
||||||
import mimetypes
|
|
||||||
import netrc
|
|
||||||
import optparse
|
|
||||||
import os
|
|
||||||
import re
|
|
||||||
import sys
|
|
||||||
|
|
||||||
sys.path.insert(0, os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
|
|
||||||
|
|
||||||
from yt_dlp.compat import (
|
|
||||||
compat_basestring,
|
|
||||||
compat_getpass,
|
|
||||||
compat_print,
|
|
||||||
compat_urllib_request,
|
|
||||||
)
|
|
||||||
from yt_dlp.utils import (
|
|
||||||
make_HTTPS_handler,
|
|
||||||
sanitized_Request,
|
|
||||||
)
|
|
||||||
|
|
||||||
|
|
||||||
class GitHubReleaser(object):
|
|
||||||
_API_URL = 'https://api.github.com/repos/ytdl-org/youtube-dl/releases'
|
|
||||||
_UPLOADS_URL = 'https://uploads.github.com/repos/ytdl-org/youtube-dl/releases/%s/assets?name=%s'
|
|
||||||
_NETRC_MACHINE = 'github.com'
|
|
||||||
|
|
||||||
def __init__(self, debuglevel=0):
|
|
||||||
self._init_github_account()
|
|
||||||
https_handler = make_HTTPS_handler({}, debuglevel=debuglevel)
|
|
||||||
self._opener = compat_urllib_request.build_opener(https_handler)
|
|
||||||
|
|
||||||
def _init_github_account(self):
|
|
||||||
try:
|
|
||||||
info = netrc.netrc().authenticators(self._NETRC_MACHINE)
|
|
||||||
if info is not None:
|
|
||||||
self._token = info[2]
|
|
||||||
compat_print('Using GitHub credentials found in .netrc...')
|
|
||||||
return
|
|
||||||
else:
|
|
||||||
compat_print('No GitHub credentials found in .netrc')
|
|
||||||
except (IOError, netrc.NetrcParseError):
|
|
||||||
compat_print('Unable to parse .netrc')
|
|
||||||
self._token = compat_getpass(
|
|
||||||
'Type your GitHub PAT (personal access token) and press [Return]: ')
|
|
||||||
|
|
||||||
def _call(self, req):
|
|
||||||
if isinstance(req, compat_basestring):
|
|
||||||
req = sanitized_Request(req)
|
|
||||||
req.add_header('Authorization', 'token %s' % self._token)
|
|
||||||
response = self._opener.open(req).read().decode('utf-8')
|
|
||||||
return json.loads(response)
|
|
||||||
|
|
||||||
def list_releases(self):
|
|
||||||
return self._call(self._API_URL)
|
|
||||||
|
|
||||||
def create_release(self, tag_name, name=None, body='', draft=False, prerelease=False):
|
|
||||||
data = {
|
|
||||||
'tag_name': tag_name,
|
|
||||||
'target_commitish': 'master',
|
|
||||||
'name': name,
|
|
||||||
'body': body,
|
|
||||||
'draft': draft,
|
|
||||||
'prerelease': prerelease,
|
|
||||||
}
|
|
||||||
req = sanitized_Request(self._API_URL, json.dumps(data).encode('utf-8'))
|
|
||||||
return self._call(req)
|
|
||||||
|
|
||||||
def create_asset(self, release_id, asset):
|
|
||||||
asset_name = os.path.basename(asset)
|
|
||||||
url = self._UPLOADS_URL % (release_id, asset_name)
|
|
||||||
# Our files are small enough to be loaded directly into memory.
|
|
||||||
data = open(asset, 'rb').read()
|
|
||||||
req = sanitized_Request(url, data)
|
|
||||||
mime_type, _ = mimetypes.guess_type(asset_name)
|
|
||||||
req.add_header('Content-Type', mime_type or 'application/octet-stream')
|
|
||||||
return self._call(req)
|
|
||||||
|
|
||||||
|
|
||||||
def main():
|
|
||||||
parser = optparse.OptionParser(usage='%prog CHANGELOG VERSION BUILDPATH')
|
|
||||||
options, args = parser.parse_args()
|
|
||||||
if len(args) != 3:
|
|
||||||
parser.error('Expected a version and a build directory')
|
|
||||||
|
|
||||||
changelog_file, version, build_path = args
|
|
||||||
|
|
||||||
with io.open(changelog_file, encoding='utf-8') as inf:
|
|
||||||
changelog = inf.read()
|
|
||||||
|
|
||||||
mobj = re.search(r'(?s)version %s\n{2}(.+?)\n{3}' % version, changelog)
|
|
||||||
body = mobj.group(1) if mobj else ''
|
|
||||||
|
|
||||||
releaser = GitHubReleaser()
|
|
||||||
|
|
||||||
new_release = releaser.create_release(
|
|
||||||
version, name='yt-dlp %s' % version, body=body)
|
|
||||||
release_id = new_release['id']
|
|
||||||
|
|
||||||
for asset in os.listdir(build_path):
|
|
||||||
compat_print('Uploading %s...' % asset)
|
|
||||||
releaser.create_asset(release_id, os.path.join(build_path, asset))
|
|
||||||
|
|
||||||
|
|
||||||
if __name__ == '__main__':
|
|
||||||
main()
|
|
||||||
@@ -1,12 +1,14 @@
 #!/usr/bin/env python3
-from __future__ import unicode_literals
 
-import optparse
+# Allow direct execution
 import os
-from os.path import dirname as dirn
 import sys
 
-sys.path.insert(0, dirn(dirn((os.path.abspath(__file__)))))
+sys.path.insert(0, os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
+
+
+import optparse
 
 import yt_dlp
 from yt_dlp.utils import shell_quote
+
@@ -46,5 +48,5 @@ def build_completion(opt_parser):
         f.write(filled_template)
 
 
-parser = yt_dlp.parseOpts()[0]
+parser = yt_dlp.parseOpts(ignore_config_files=True)[0]
 build_completion(parser)
@@ -1,15 +1,17 @@
 #!/usr/bin/env python3
-from __future__ import unicode_literals
 
+# Allow direct execution
+import os
+import sys
+
+sys.path.insert(0, os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
+
+
 import codecs
 import subprocess
 
-import os
-import sys
-sys.path.insert(0, os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
-
-from yt_dlp.utils import intlist_to_bytes
 from yt_dlp.aes import aes_encrypt, key_expansion
+from yt_dlp.utils import intlist_to_bytes
 
 secret_msg = b'Secret message goes here'
 
@@ -1,43 +0,0 @@
|
|||||||
#!/usr/bin/env python3
|
|
||||||
from __future__ import unicode_literals
|
|
||||||
|
|
||||||
import json
|
|
||||||
import sys
|
|
||||||
import hashlib
|
|
||||||
import os.path
|
|
||||||
|
|
||||||
|
|
||||||
if len(sys.argv) <= 1:
|
|
||||||
print('Specify the version number as parameter')
|
|
||||||
sys.exit()
|
|
||||||
version = sys.argv[1]
|
|
||||||
|
|
||||||
with open('update/LATEST_VERSION', 'w') as f:
|
|
||||||
f.write(version)
|
|
||||||
|
|
||||||
versions_info = json.load(open('update/versions.json'))
|
|
||||||
if 'signature' in versions_info:
|
|
||||||
del versions_info['signature']
|
|
||||||
|
|
||||||
new_version = {}
|
|
||||||
|
|
||||||
filenames = {
|
|
||||||
'bin': 'yt-dlp',
|
|
||||||
'exe': 'yt-dlp.exe',
|
|
||||||
'tar': 'yt-dlp-%s.tar.gz' % version}
|
|
||||||
build_dir = os.path.join('..', '..', 'build', version)
|
|
||||||
for key, filename in filenames.items():
|
|
||||||
url = 'https://yt-dl.org/downloads/%s/%s' % (version, filename)
|
|
||||||
fn = os.path.join(build_dir, filename)
|
|
||||||
with open(fn, 'rb') as f:
|
|
||||||
data = f.read()
|
|
||||||
if not data:
|
|
||||||
raise ValueError('File %s is empty!' % fn)
|
|
||||||
sha256sum = hashlib.sha256(data).hexdigest()
|
|
||||||
new_version[key] = (url, sha256sum)
|
|
||||||
|
|
||||||
versions_info['versions'][version] = new_version
|
|
||||||
versions_info['latest'] = version
|
|
||||||
|
|
||||||
with open('update/versions.json', 'w') as jsonf:
|
|
||||||
json.dump(versions_info, jsonf, indent=4, sort_keys=True)
|
|
||||||
@@ -1,22 +0,0 @@
|
|||||||
#!/usr/bin/env python3
|
|
||||||
from __future__ import unicode_literals
|
|
||||||
|
|
||||||
import json
|
|
||||||
|
|
||||||
versions_info = json.load(open('update/versions.json'))
|
|
||||||
version = versions_info['latest']
|
|
||||||
version_dict = versions_info['versions'][version]
|
|
||||||
|
|
||||||
# Read template page
|
|
||||||
with open('download.html.in', 'r', encoding='utf-8') as tmplf:
|
|
||||||
template = tmplf.read()
|
|
||||||
|
|
||||||
template = template.replace('@PROGRAM_VERSION@', version)
|
|
||||||
template = template.replace('@PROGRAM_URL@', version_dict['bin'][0])
|
|
||||||
template = template.replace('@PROGRAM_SHA256SUM@', version_dict['bin'][1])
|
|
||||||
template = template.replace('@EXE_URL@', version_dict['exe'][0])
|
|
||||||
template = template.replace('@EXE_SHA256SUM@', version_dict['exe'][1])
|
|
||||||
template = template.replace('@TAR_URL@', version_dict['tar'][0])
|
|
||||||
template = template.replace('@TAR_SHA256SUM@', version_dict['tar'][1])
|
|
||||||
with open('download.html', 'w', encoding='utf-8') as dlf:
|
|
||||||
dlf.write(template)
|
|
||||||
@@ -1,34 +0,0 @@
|
|||||||
#!/usr/bin/env python3
|
|
||||||
from __future__ import unicode_literals, with_statement
|
|
||||||
|
|
||||||
import rsa
|
|
||||||
import json
|
|
||||||
from binascii import hexlify
|
|
||||||
|
|
||||||
try:
|
|
||||||
input = raw_input
|
|
||||||
except NameError:
|
|
||||||
pass
|
|
||||||
|
|
||||||
versions_info = json.load(open('update/versions.json'))
|
|
||||||
if 'signature' in versions_info:
|
|
||||||
del versions_info['signature']
|
|
||||||
|
|
||||||
print('Enter the PKCS1 private key, followed by a blank line:')
|
|
||||||
privkey = b''
|
|
||||||
while True:
|
|
||||||
try:
|
|
||||||
line = input()
|
|
||||||
except EOFError:
|
|
||||||
break
|
|
||||||
if line == '':
|
|
||||||
break
|
|
||||||
privkey += line.encode('ascii') + b'\n'
|
|
||||||
privkey = rsa.PrivateKey.load_pkcs1(privkey)
|
|
||||||
|
|
||||||
signature = hexlify(rsa.pkcs1.sign(json.dumps(versions_info, sort_keys=True).encode('utf-8'), privkey, 'SHA-256')).decode()
|
|
||||||
print('signature: ' + signature)
|
|
||||||
|
|
||||||
versions_info['signature'] = signature
|
|
||||||
with open('update/versions.json', 'w') as versionsf:
|
|
||||||
json.dump(versions_info, versionsf, indent=4, sort_keys=True)
|
|
||||||
@@ -1,21 +0,0 @@
|
|||||||
#!/usr/bin/env python3
|
|
||||||
# coding: utf-8
|
|
||||||
|
|
||||||
from __future__ import with_statement, unicode_literals
|
|
||||||
|
|
||||||
import datetime
|
|
||||||
import glob
|
|
||||||
import io # For Python 2 compatibility
|
|
||||||
import os
|
|
||||||
import re
|
|
||||||
|
|
||||||
year = str(datetime.datetime.now().year)
|
|
||||||
for fn in glob.glob('*.html*'):
|
|
||||||
with io.open(fn, encoding='utf-8') as f:
|
|
||||||
content = f.read()
|
|
||||||
newc = re.sub(r'(?P<copyright>Copyright © 2011-)(?P<year>[0-9]{4})', 'Copyright © 2011-' + year, content)
|
|
||||||
if content != newc:
|
|
||||||
tmpFn = fn + '.part'
|
|
||||||
with io.open(tmpFn, 'wt', encoding='utf-8') as outf:
|
|
||||||
outf.write(newc)
|
|
||||||
os.rename(tmpFn, fn)
|
|
||||||
@@ -1,76 +0,0 @@
|
|||||||
#!/usr/bin/env python3
|
|
||||||
from __future__ import unicode_literals
|
|
||||||
|
|
||||||
import datetime
|
|
||||||
import io
|
|
||||||
import json
|
|
||||||
import textwrap
|
|
||||||
|
|
||||||
|
|
||||||
atom_template = textwrap.dedent("""\
|
|
||||||
<?xml version="1.0" encoding="utf-8"?>
|
|
||||||
<feed xmlns="http://www.w3.org/2005/Atom">
|
|
||||||
<link rel="self" href="http://ytdl-org.github.io/youtube-dl/update/releases.atom" />
|
|
||||||
<title>yt-dlp releases</title>
|
|
||||||
<id>https://yt-dl.org/feed/yt-dlp-updates-feed</id>
|
|
||||||
<updated>@TIMESTAMP@</updated>
|
|
||||||
@ENTRIES@
|
|
||||||
</feed>""")
|
|
||||||
|
|
||||||
entry_template = textwrap.dedent("""
|
|
||||||
<entry>
|
|
||||||
<id>https://yt-dl.org/feed/yt-dlp-updates-feed/yt-dlp-@VERSION@</id>
|
|
||||||
<title>New version @VERSION@</title>
|
|
||||||
<link href="http://ytdl-org.github.io/yt-dlp" />
|
|
||||||
<content type="xhtml">
|
|
||||||
<div xmlns="http://www.w3.org/1999/xhtml">
|
|
||||||
Downloads available at <a href="https://yt-dl.org/downloads/@VERSION@/">https://yt-dl.org/downloads/@VERSION@/</a>
|
|
||||||
</div>
|
|
||||||
</content>
|
|
||||||
<author>
|
|
||||||
<name>The yt-dlp maintainers</name>
|
|
||||||
</author>
|
|
||||||
<updated>@TIMESTAMP@</updated>
|
|
||||||
</entry>
|
|
||||||
""")
|
|
||||||
|
|
||||||
now = datetime.datetime.now()
|
|
||||||
now_iso = now.isoformat() + 'Z'
|
|
||||||
|
|
||||||
atom_template = atom_template.replace('@TIMESTAMP@', now_iso)
|
|
||||||
|
|
||||||
versions_info = json.load(open('update/versions.json'))
|
|
||||||
versions = list(versions_info['versions'].keys())
|
|
||||||
versions.sort()
|
|
||||||
|
|
||||||
entries = []
|
|
||||||
for v in versions:
|
|
||||||
fields = v.split('.')
|
|
||||||
year, month, day = map(int, fields[:3])
|
|
||||||
faked = 0
|
|
||||||
patchlevel = 0
|
|
||||||
while True:
|
|
||||||
try:
|
|
||||||
datetime.date(year, month, day)
|
|
||||||
except ValueError:
|
|
||||||
day -= 1
|
|
||||||
faked += 1
|
|
||||||
assert day > 0
|
|
||||||
continue
|
|
||||||
break
|
|
||||||
if len(fields) >= 4:
|
|
||||||
try:
|
|
||||||
patchlevel = int(fields[3])
|
|
||||||
except ValueError:
|
|
||||||
patchlevel = 1
|
|
||||||
timestamp = '%04d-%02d-%02dT00:%02d:%02dZ' % (year, month, day, faked, patchlevel)
|
|
||||||
|
|
||||||
entry = entry_template.replace('@TIMESTAMP@', timestamp)
|
|
||||||
entry = entry.replace('@VERSION@', v)
|
|
||||||
entries.append(entry)
|
|
||||||
|
|
||||||
entries_str = textwrap.indent(''.join(entries), '\t')
|
|
||||||
atom_template = atom_template.replace('@ENTRIES@', entries_str)
|
|
||||||
|
|
||||||
with io.open('update/releases.atom', 'w', encoding='utf-8') as atom_file:
|
|
||||||
atom_file.write(atom_template)
|
|
||||||
@@ -1,37 +0,0 @@
|
|||||||
#!/usr/bin/env python3
|
|
||||||
from __future__ import unicode_literals
|
|
||||||
|
|
||||||
import sys
|
|
||||||
import os
|
|
||||||
import textwrap
|
|
||||||
|
|
||||||
# We must be able to import yt_dlp
|
|
||||||
sys.path.insert(0, os.path.dirname(os.path.dirname(os.path.dirname(os.path.abspath(__file__)))))
|
|
||||||
|
|
||||||
import yt_dlp
|
|
||||||
|
|
||||||
|
|
||||||
def main():
|
|
||||||
with open('supportedsites.html.in', 'r', encoding='utf-8') as tmplf:
|
|
||||||
template = tmplf.read()
|
|
||||||
|
|
||||||
ie_htmls = []
|
|
||||||
for ie in yt_dlp.list_extractors(age_limit=None):
|
|
||||||
ie_html = '<b>{}</b>'.format(ie.IE_NAME)
|
|
||||||
ie_desc = getattr(ie, 'IE_DESC', None)
|
|
||||||
if ie_desc is False:
|
|
||||||
continue
|
|
||||||
elif ie_desc is not None:
|
|
||||||
ie_html += ': {}'.format(ie.IE_DESC)
|
|
||||||
if not ie.working():
|
|
||||||
ie_html += ' (Currently broken)'
|
|
||||||
ie_htmls.append('<li>{}</li>'.format(ie_html))
|
|
||||||
|
|
||||||
template = template.replace('@SITES@', textwrap.indent('\n'.join(ie_htmls), '\t'))
|
|
||||||
|
|
||||||
with open('supportedsites.html', 'w', encoding='utf-8') as sitesf:
|
|
||||||
sitesf.write(template)
|
|
||||||
|
|
||||||
|
|
||||||
if __name__ == '__main__':
|
|
||||||
main()
|
|
||||||
devscripts/install_deps.py (new executable file, 81 lines)
@@ -0,0 +1,81 @@
#!/usr/bin/env python3

# Allow execution from anywhere
import os
import sys

sys.path.insert(0, os.path.dirname(os.path.dirname(os.path.abspath(__file__))))

import argparse
import re
import subprocess

from pathlib import Path

from devscripts.tomlparse import parse_toml
from devscripts.utils import read_file


def parse_args():
    parser = argparse.ArgumentParser(description='Install dependencies for yt-dlp')
    parser.add_argument(
        'input', nargs='?', metavar='TOMLFILE', default=Path(__file__).parent.parent / 'pyproject.toml',
        help='input file (default: %(default)s)')
    parser.add_argument(
        '-e', '--exclude', metavar='DEPENDENCY', action='append',
        help='exclude a dependency')
    parser.add_argument(
        '-i', '--include', metavar='GROUP', action='append',
        help='include an optional dependency group')
    parser.add_argument(
        '-o', '--only-optional', action='store_true',
        help='only install optional dependencies')
    parser.add_argument(
        '-p', '--print', action='store_true',
        help='only print requirements to stdout')
    parser.add_argument(
        '-u', '--user', action='store_true',
        help='install with pip as --user')
    return parser.parse_args()


def main():
    args = parse_args()
    project_table = parse_toml(read_file(args.input))['project']
    recursive_pattern = re.compile(rf'{project_table["name"]}\[(?P<group_name>[\w-]+)\]')
    optional_groups = project_table['optional-dependencies']
    excludes = args.exclude or []

    def yield_deps(group):
        for dep in group:
            if mobj := recursive_pattern.fullmatch(dep):
                yield from optional_groups.get(mobj.group('group_name'), [])
            else:
                yield dep

    targets = []
    if not args.only_optional: # `-o` should exclude 'dependencies' and the 'default' group
        targets.extend(project_table['dependencies'])
        if 'default' not in excludes: # `--exclude default` should exclude entire 'default' group
            targets.extend(yield_deps(optional_groups['default']))

    for include in filter(None, map(optional_groups.get, args.include or [])):
        targets.extend(yield_deps(include))

    targets = [t for t in targets if re.match(r'[\w-]+', t).group(0).lower() not in excludes]

    if args.print:
        for target in targets:
            print(target)
        return

    pip_args = [sys.executable, '-m', 'pip', 'install', '-U']
    if args.user:
        pip_args.append('--user')
    pip_args.extend(targets)

    return subprocess.call(pip_args)


if __name__ == '__main__':
    sys.exit(main())
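A typical invocation of the installer above, driven from Python. The optional group name 'dev' is only an example and is assumed to exist under [project.optional-dependencies] in pyproject.toml.

# Example invocation; the group name 'dev' is an assumption about pyproject.toml.
import subprocess
import sys

subprocess.run(
    [sys.executable, 'devscripts/install_deps.py', '--include', 'dev', '--print'],
    check=True)  # --print lists the resolved requirements instead of installing them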
@@ -1,31 +1,39 @@
-# coding: utf-8
+import importlib
+import random
 import re
 
-from ..utils import bug_reports_message, write_string
+from ..utils import (
+    age_restricted,
+    bug_reports_message,
+    classproperty,
+    variadic,
+    write_string,
+)
+
+# These bloat the lazy_extractors, so allow them to passthrough silently
+ALLOWED_CLASSMETHODS = {'extract_from_webpage', 'get_testcases', 'get_webpage_testcases'}
+_WARNED = False
 
 
 class LazyLoadMetaClass(type):
     def __getattr__(cls, name):
-        if '_real_class' not in cls.__dict__:
-            write_string(
-                f'WARNING: Falling back to normal extractor since lazy extractor '
-                f'{cls.__name__} does not have attribute {name}{bug_reports_message()}')
-        return getattr(cls._get_real_class(), name)
+        global _WARNED
+        if ('_real_class' not in cls.__dict__
+                and name not in ALLOWED_CLASSMETHODS and not _WARNED):
+            _WARNED = True
+            write_string('WARNING: Falling back to normal extractor since lazy extractor '
+                         f'{cls.__name__} does not have attribute {name}{bug_reports_message()}\n')
+        return getattr(cls.real_class, name)
 
 
 class LazyLoadExtractor(metaclass=LazyLoadMetaClass):
-    _module = None
-    _WORKING = True
-
-    @classmethod
-    def _get_real_class(cls):
+    @classproperty
+    def real_class(cls):
         if '_real_class' not in cls.__dict__:
-            mod = __import__(cls._module, fromlist=(cls.__name__,))
-            cls._real_class = getattr(mod, cls.__name__)
+            cls._real_class = getattr(importlib.import_module(cls._module), cls.__name__)
         return cls._real_class
 
     def __new__(cls, *args, **kwargs):
-        real_cls = cls._get_real_class()
-        instance = real_cls.__new__(real_cls)
+        instance = cls.real_class.__new__(cls.real_class)
         instance.__init__(*args, **kwargs)
         return instance
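The diff above keeps the same deferred-import idea while moving to importlib and a cached classproperty. A generic, self-contained illustration of that pattern follows; the stub class and target module here are made up and are not yt-dlp code.

# Generic illustration of the lazy-import pattern; not yt-dlp code.
import importlib


class LazyLoadMeta(type):
    def __getattr__(cls, name):
        # Called only when normal lookup fails: import the real class once, then delegate.
        if '_real_class' not in cls.__dict__:
            cls._real_class = getattr(importlib.import_module(cls._module), cls.__name__)
        return getattr(cls._real_class, name)


class StreamHandler(metaclass=LazyLoadMeta):  # stands in for a generated lazy stub
    _module = 'logging'


print(repr(StreamHandler.terminator))  # 'logging' is imported here, on first attribute access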
devscripts/make_changelog.py (new file, 510 lines)
@@ -0,0 +1,510 @@
|
|||||||
|
from __future__ import annotations
|
||||||
|
|
||||||
|
# Allow direct execution
|
||||||
|
import os
|
||||||
|
import sys
|
||||||
|
|
||||||
|
sys.path.insert(0, os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
|
||||||
|
|
||||||
|
import enum
|
||||||
|
import itertools
|
||||||
|
import json
|
||||||
|
import logging
|
||||||
|
import re
|
||||||
|
from collections import defaultdict
|
||||||
|
from dataclasses import dataclass
|
||||||
|
from functools import lru_cache
|
||||||
|
from pathlib import Path
|
||||||
|
|
||||||
|
from devscripts.utils import read_file, run_process, write_file
|
||||||
|
|
||||||
|
BASE_URL = 'https://github.com'
|
||||||
|
LOCATION_PATH = Path(__file__).parent
|
||||||
|
HASH_LENGTH = 7
|
||||||
|
|
||||||
|
logger = logging.getLogger(__name__)
|
||||||
|
|
||||||
|
|
||||||
|
class CommitGroup(enum.Enum):
|
||||||
|
PRIORITY = 'Important'
|
||||||
|
CORE = 'Core'
|
||||||
|
EXTRACTOR = 'Extractor'
|
||||||
|
DOWNLOADER = 'Downloader'
|
||||||
|
POSTPROCESSOR = 'Postprocessor'
|
||||||
|
NETWORKING = 'Networking'
|
||||||
|
MISC = 'Misc.'
|
||||||
|
|
||||||
|
@classmethod
|
||||||
|
@lru_cache
|
||||||
|
def subgroup_lookup(cls):
|
||||||
|
return {
|
||||||
|
name: group
|
||||||
|
for group, names in {
|
||||||
|
cls.MISC: {
|
||||||
|
'build',
|
||||||
|
'ci',
|
||||||
|
'cleanup',
|
||||||
|
'devscripts',
|
||||||
|
'docs',
|
||||||
|
'test',
|
||||||
|
},
|
||||||
|
cls.NETWORKING: {
|
||||||
|
'rh',
|
||||||
|
},
|
||||||
|
}.items()
|
||||||
|
for name in names
|
||||||
|
}
|
||||||
|
|
||||||
|
@classmethod
|
||||||
|
@lru_cache
|
||||||
|
def group_lookup(cls):
|
||||||
|
result = {
|
||||||
|
'fd': cls.DOWNLOADER,
|
||||||
|
'ie': cls.EXTRACTOR,
|
||||||
|
'pp': cls.POSTPROCESSOR,
|
||||||
|
'upstream': cls.CORE,
|
||||||
|
}
|
||||||
|
result.update({item.name.lower(): item for item in iter(cls)})
|
||||||
|
return result
|
||||||
|
|
||||||
|
@classmethod
|
||||||
|
def get(cls, value: str) -> tuple[CommitGroup | None, str | None]:
|
||||||
|
group, _, subgroup = (group.strip().lower() for group in value.partition('/'))
|
||||||
|
|
||||||
|
result = cls.group_lookup().get(group)
|
||||||
|
if not result:
|
||||||
|
if subgroup:
|
||||||
|
return None, value
|
||||||
|
subgroup = group
|
||||||
|
result = cls.subgroup_lookup().get(subgroup)
|
||||||
|
|
||||||
|
return result, subgroup or None
|
||||||
|
|
||||||
|
|
||||||
|
@dataclass
|
||||||
|
class Commit:
|
||||||
|
hash: str | None
|
||||||
|
short: str
|
||||||
|
authors: list[str]
|
||||||
|
|
||||||
|
def __str__(self):
|
||||||
|
result = f'{self.short!r}'
|
||||||
|
|
||||||
|
if self.hash:
|
||||||
|
result += f' ({self.hash[:HASH_LENGTH]})'
|
||||||
|
|
||||||
|
if self.authors:
|
||||||
|
authors = ', '.join(self.authors)
|
||||||
|
result += f' by {authors}'
|
||||||
|
|
||||||
|
return result
|
||||||
|
|
||||||
|
|
||||||
|
@dataclass
|
||||||
|
class CommitInfo:
|
||||||
|
details: str | None
|
||||||
|
sub_details: tuple[str, ...]
|
||||||
|
message: str
|
||||||
|
issues: list[str]
|
||||||
|
commit: Commit
|
||||||
|
fixes: list[Commit]
|
||||||
|
|
||||||
|
def key(self):
|
||||||
|
return ((self.details or '').lower(), self.sub_details, self.message)
|
||||||
|
|
||||||
|
|
||||||
|
def unique(items):
|
||||||
|
return sorted({item.strip().lower(): item for item in items if item}.values())
|
||||||
|
|
||||||
|
|
||||||
|
class Changelog:
|
||||||
|
MISC_RE = re.compile(r'(?:^|\b)(?:lint(?:ing)?|misc|format(?:ting)?|fixes)(?:\b|$)', re.IGNORECASE)
|
||||||
|
ALWAYS_SHOWN = (CommitGroup.PRIORITY,)
|
||||||
|
|
||||||
|
def __init__(self, groups, repo, collapsible=False):
|
||||||
|
self._groups = groups
|
||||||
|
self._repo = repo
|
||||||
|
self._collapsible = collapsible
|
||||||
|
|
||||||
|
def __str__(self):
|
||||||
|
return '\n'.join(self._format_groups(self._groups)).replace('\t', ' ')
|
||||||
|
|
||||||
|
def _format_groups(self, groups):
|
||||||
|
first = True
|
||||||
|
for item in CommitGroup:
|
||||||
|
if self._collapsible and item not in self.ALWAYS_SHOWN and first:
|
||||||
|
first = False
|
||||||
|
yield '\n<details><summary><h3>Changelog</h3></summary>\n'
|
||||||
|
|
||||||
|
group = groups[item]
|
||||||
|
if group:
|
||||||
|
yield self.format_module(item.value, group)
|
||||||
|
|
||||||
|
if self._collapsible:
|
||||||
|
yield '\n</details>'
|
||||||
|
|
||||||
|
def format_module(self, name, group):
|
||||||
|
result = f'\n#### {name} changes\n' if name else '\n'
|
||||||
|
return result + '\n'.join(self._format_group(group))
|
||||||
|
|
||||||
|
def _format_group(self, group):
|
||||||
|
sorted_group = sorted(group, key=CommitInfo.key)
|
||||||
|
detail_groups = itertools.groupby(sorted_group, lambda item: (item.details or '').lower())
|
||||||
|
for _, items in detail_groups:
|
||||||
|
items = list(items)
|
||||||
|
details = items[0].details
|
||||||
|
|
||||||
|
if details == 'cleanup':
|
||||||
|
items = self._prepare_cleanup_misc_items(items)
|
||||||
|
|
||||||
|
prefix = '-'
|
||||||
|
if details:
|
||||||
|
if len(items) == 1:
|
||||||
|
prefix = f'- **{details}**:'
|
||||||
|
else:
|
||||||
|
yield f'- **{details}**'
|
||||||
|
prefix = '\t-'
|
||||||
|
|
||||||
|
sub_detail_groups = itertools.groupby(items, lambda item: tuple(map(str.lower, item.sub_details)))
|
||||||
|
for sub_details, entries in sub_detail_groups:
|
||||||
|
if not sub_details:
|
||||||
|
for entry in entries:
|
||||||
|
yield f'{prefix} {self.format_single_change(entry)}'
|
||||||
|
continue
|
||||||
|
|
||||||
|
entries = list(entries)
|
||||||
|
sub_prefix = f'{prefix} {", ".join(entries[0].sub_details)}'
|
||||||
|
if len(entries) == 1:
|
||||||
|
yield f'{sub_prefix}: {self.format_single_change(entries[0])}'
|
||||||
|
continue
|
||||||
|
|
||||||
|
yield sub_prefix
|
||||||
|
for entry in entries:
|
||||||
|
yield f'\t{prefix} {self.format_single_change(entry)}'
|
||||||
|
|
||||||
|
def _prepare_cleanup_misc_items(self, items):
|
||||||
|
cleanup_misc_items = defaultdict(list)
|
||||||
|
sorted_items = []
|
||||||
|
for item in items:
|
||||||
|
if self.MISC_RE.search(item.message):
|
||||||
|
cleanup_misc_items[tuple(item.commit.authors)].append(item)
|
||||||
|
else:
|
||||||
|
sorted_items.append(item)
|
||||||
|
|
||||||
|
for commit_infos in cleanup_misc_items.values():
|
||||||
|
sorted_items.append(CommitInfo(
|
||||||
|
'cleanup', ('Miscellaneous',), ', '.join(
|
||||||
|
self._format_message_link(None, info.commit.hash)
|
||||||
|
for info in sorted(commit_infos, key=lambda item: item.commit.hash or '')),
|
||||||
|
[], Commit(None, '', commit_infos[0].commit.authors), []))
|
||||||
|
|
||||||
|
return sorted_items
|
||||||
|
|
||||||
|
def format_single_change(self, info: CommitInfo):
|
||||||
|
message, sep, rest = info.message.partition('\n')
|
||||||
|
if '[' not in message:
|
||||||
|
# If the message doesn't already contain markdown links, try to add a link to the commit
|
||||||
|
message = self._format_message_link(message, info.commit.hash)
|
||||||
|
|
||||||
|
if info.issues:
|
||||||
|
message = f'{message} ({self._format_issues(info.issues)})'
|
||||||
|
|
||||||
|
if info.commit.authors:
|
||||||
|
message = f'{message} by {self._format_authors(info.commit.authors)}'
|
||||||
|
|
||||||
|
if info.fixes:
|
||||||
|
fix_message = ', '.join(f'{self._format_message_link(None, fix.hash)}' for fix in info.fixes)
|
||||||
|
|
||||||
|
authors = sorted({author for fix in info.fixes for author in fix.authors}, key=str.casefold)
|
||||||
|
if authors != info.commit.authors:
|
||||||
|
fix_message = f'{fix_message} by {self._format_authors(authors)}'
|
||||||
|
|
||||||
|
message = f'{message} (With fixes in {fix_message})'
|
||||||
|
|
||||||
|
return message if not sep else f'{message}{sep}{rest}'
|
||||||
|
|
||||||
|
def _format_message_link(self, message, hash):
|
||||||
|
assert message or hash, 'Improperly defined commit message or override'
|
||||||
|
message = message if message else hash[:HASH_LENGTH]
|
||||||
|
return f'[{message}]({self.repo_url}/commit/{hash})' if hash else message
|
||||||
|
|
||||||
|
def _format_issues(self, issues):
|
||||||
|
return ', '.join(f'[#{issue}]({self.repo_url}/issues/{issue})' for issue in issues)
|
||||||
|
|
||||||
|
@staticmethod
|
||||||
|
def _format_authors(authors):
|
||||||
|
return ', '.join(f'[{author}]({BASE_URL}/{author})' for author in authors)
|
||||||
|
|
||||||
|
@property
|
||||||
|
def repo_url(self):
|
||||||
|
return f'{BASE_URL}/{self._repo}'
|
||||||
|
|
||||||
|
|
||||||
|
class CommitRange:
|
||||||
|
COMMAND = 'git'
|
||||||
|
COMMIT_SEPARATOR = '-----'
|
||||||
|
|
||||||
|
AUTHOR_INDICATOR_RE = re.compile(r'Authored by:? ', re.IGNORECASE)
|
||||||
|
MESSAGE_RE = re.compile(r'''
|
||||||
|
(?:\[(?P<prefix>[^\]]+)\]\ )?
|
||||||
|
(?:(?P<sub_details>`?[\w.-]+`?): )?
|
||||||
|
(?P<message>.+?)
|
||||||
|
(?:\ \((?P<issues>\#\d+(?:,\ \#\d+)*)\))?
|
||||||
|
''', re.VERBOSE | re.DOTALL)
|
||||||
|
EXTRACTOR_INDICATOR_RE = re.compile(r'(?:Fix|Add)\s+Extractors?', re.IGNORECASE)
|
||||||
|
REVERT_RE = re.compile(r'(?:\[[^\]]+\]\s+)?(?i:Revert)\s+([\da-f]{40})')
|
||||||
|
FIXES_RE = re.compile(r'(?i:Fix(?:es)?(?:\s+bugs?)?(?:\s+in|\s+for)?|Revert|Improve)\s+([\da-f]{40})')
|
||||||
|
UPSTREAM_MERGE_RE = re.compile(r'Update to ytdl-commit-([\da-f]+)')
|
||||||
|
|
||||||
|
def __init__(self, start, end, default_author=None):
|
||||||
|
self._start, self._end = start, end
|
||||||
|
self._commits, self._fixes = self._get_commits_and_fixes(default_author)
|
||||||
|
self._commits_added = []
|
||||||
|
|
||||||
|
def __iter__(self):
|
||||||
|
return iter(itertools.chain(self._commits.values(), self._commits_added))
|
||||||
|
|
||||||
|
def __len__(self):
|
||||||
|
return len(self._commits) + len(self._commits_added)
|
||||||
|
|
||||||
|
def __contains__(self, commit):
|
||||||
|
if isinstance(commit, Commit):
|
||||||
|
if not commit.hash:
|
||||||
|
return False
|
||||||
|
commit = commit.hash
|
||||||
|
|
||||||
|
return commit in self._commits
|
||||||
|
|
||||||
|
def _get_commits_and_fixes(self, default_author):
|
||||||
|
result = run_process(
|
||||||
|
self.COMMAND, 'log', f'--format=%H%n%s%n%b%n{self.COMMIT_SEPARATOR}',
|
||||||
|
f'{self._start}..{self._end}' if self._start else self._end).stdout
|
||||||
|
|
||||||
|
commits, reverts = {}, {}
|
||||||
|
fixes = defaultdict(list)
|
||||||
|
lines = iter(result.splitlines(False))
|
||||||
|
for i, commit_hash in enumerate(lines):
|
||||||
|
short = next(lines)
|
||||||
|
skip = short.startswith('Release ') or short == '[version] update'
|
||||||
|
|
||||||
|
authors = [default_author] if default_author else []
|
||||||
|
for line in iter(lambda: next(lines), self.COMMIT_SEPARATOR):
|
||||||
|
match = self.AUTHOR_INDICATOR_RE.match(line)
|
||||||
|
if match:
|
||||||
|
authors = sorted(map(str.strip, line[match.end():].split(',')), key=str.casefold)
|
||||||
|
|
||||||
|
commit = Commit(commit_hash, short, authors)
|
||||||
|
if skip and (self._start or not i):
|
||||||
|
logger.debug(f'Skipped commit: {commit}')
|
||||||
|
continue
|
||||||
|
elif skip:
|
||||||
|
logger.debug(f'Reached Release commit, breaking: {commit}')
|
||||||
|
break
|
||||||
|
|
||||||
|
revert_match = self.REVERT_RE.fullmatch(commit.short)
|
||||||
|
if revert_match:
|
||||||
|
reverts[revert_match.group(1)] = commit
|
||||||
|
continue
|
||||||
|
|
||||||
|
fix_match = self.FIXES_RE.search(commit.short)
|
||||||
|
if fix_match:
|
||||||
|
commitish = fix_match.group(1)
|
||||||
|
fixes[commitish].append(commit)
|
||||||
|
|
||||||
|
commits[commit.hash] = commit
|
||||||
|
|
||||||
|
for commitish, revert_commit in reverts.items():
|
||||||
|
reverted = commits.pop(commitish, None)
|
||||||
|
if reverted:
|
||||||
|
logger.debug(f'{commitish} fully reverted {reverted}')
|
||||||
|
else:
|
||||||
|
commits[revert_commit.hash] = revert_commit
|
||||||
|
|
||||||
|
for commitish, fix_commits in fixes.items():
|
||||||
|
if commitish in commits:
|
||||||
|
hashes = ', '.join(commit.hash[:HASH_LENGTH] for commit in fix_commits)
|
||||||
|
logger.info(f'Found fix(es) for {commitish[:HASH_LENGTH]}: {hashes}')
|
||||||
|
for fix_commit in fix_commits:
|
||||||
|
del commits[fix_commit.hash]
|
||||||
|
else:
|
||||||
|
logger.debug(f'Commit with fixes not in changes: {commitish[:HASH_LENGTH]}')
|
||||||
|
|
||||||
|
return commits, fixes
|
||||||
|
|
||||||
|
def apply_overrides(self, overrides):
|
||||||
|
for override in overrides:
|
||||||
|
when = override.get('when')
|
||||||
|
if when and when not in self and when != self._start:
|
||||||
|
logger.debug(f'Ignored {when!r} override')
|
||||||
|
continue
|
||||||
|
|
||||||
|
override_hash = override.get('hash') or when
|
||||||
|
if override['action'] == 'add':
|
||||||
|
commit = Commit(override.get('hash'), override['short'], override.get('authors') or [])
|
||||||
|
logger.info(f'ADD {commit}')
|
||||||
|
self._commits_added.append(commit)
|
||||||
|
|
||||||
|
elif override['action'] == 'remove':
|
||||||
|
if override_hash in self._commits:
|
||||||
|
logger.info(f'REMOVE {self._commits[override_hash]}')
|
||||||
|
del self._commits[override_hash]
|
||||||
|
|
||||||
|
elif override['action'] == 'change':
|
||||||
|
if override_hash not in self._commits:
|
||||||
|
continue
|
||||||
|
commit = Commit(override_hash, override['short'], override.get('authors') or [])
|
||||||
|
logger.info(f'CHANGE {self._commits[commit.hash]} -> {commit}')
|
||||||
|
self._commits[commit.hash] = commit
|
||||||
|
|
||||||
|
self._commits = {key: value for key, value in reversed(self._commits.items())}
|
||||||
|
|
||||||
|
def groups(self):
|
||||||
|
group_dict = defaultdict(list)
|
||||||
|
for commit in self:
|
||||||
|
upstream_re = self.UPSTREAM_MERGE_RE.search(commit.short)
|
||||||
|
if upstream_re:
|
||||||
|
commit.short = f'[upstream] Merged with youtube-dl {upstream_re.group(1)}'
|
||||||
|
|
||||||
|
match = self.MESSAGE_RE.fullmatch(commit.short)
|
||||||
|
if not match:
|
||||||
|
logger.error(f'Error parsing short commit message: {commit.short!r}')
|
||||||
|
continue
|
||||||
|
|
||||||
|
prefix, sub_details_alt, message, issues = match.groups()
|
||||||
|
issues = [issue.strip()[1:] for issue in issues.split(',')] if issues else []
|
||||||
|
|
||||||
|
if prefix:
|
||||||
|
groups, details, sub_details = zip(*map(self.details_from_prefix, prefix.split(',')))
|
||||||
|
group = next(iter(filter(None, groups)), None)
|
||||||
|
details = ', '.join(unique(details))
|
||||||
|
sub_details = list(itertools.chain.from_iterable(sub_details))
|
||||||
|
else:
|
||||||
|
group = CommitGroup.CORE
|
||||||
|
details = None
|
||||||
|
sub_details = []
|
||||||
|
|
||||||
|
if sub_details_alt:
|
||||||
|
sub_details.append(sub_details_alt)
|
||||||
|
sub_details = tuple(unique(sub_details))
|
||||||
|
|
||||||
|
if not group:
|
||||||
|
if self.EXTRACTOR_INDICATOR_RE.search(commit.short):
|
||||||
|
group = CommitGroup.EXTRACTOR
|
||||||
|
logger.error(f'Assuming [ie] group for {commit.short!r}')
|
||||||
|
else:
|
||||||
|
group = CommitGroup.CORE
|
||||||
|
|
||||||
|
commit_info = CommitInfo(
|
||||||
|
details, sub_details, message.strip(),
|
||||||
|
issues, commit, self._fixes[commit.hash])
|
||||||
|
|
||||||
|
logger.debug(f'Resolved {commit.short!r} to {commit_info!r}')
|
||||||
|
group_dict[group].append(commit_info)
|
||||||
|
|
||||||
|
return group_dict
|
||||||
|
|
||||||
|
@staticmethod
|
||||||
|
def details_from_prefix(prefix):
|
||||||
|
if not prefix:
|
||||||
|
return CommitGroup.CORE, None, ()
|
||||||
|
|
||||||
|
prefix, *sub_details = prefix.split(':')
|
||||||
|
|
||||||
|
group, details = CommitGroup.get(prefix)
|
||||||
|
if group is CommitGroup.PRIORITY and details:
|
||||||
|
details = details.partition('/')[2].strip()
|
||||||
|
|
||||||
|
if details and '/' in details:
|
||||||
|
logger.error(f'Prefix is overnested, using first part: {prefix}')
|
||||||
|
details = details.partition('/')[0].strip()
|
||||||
|
|
||||||
|
if details == 'common':
|
||||||
|
details = None
|
||||||
|
elif group is CommitGroup.NETWORKING and details == 'rh':
|
||||||
|
details = 'Request Handler'
|
||||||
|
|
||||||
|
return group, details, sub_details
|
||||||
|
|
||||||
|
|
||||||
|
def get_new_contributors(contributors_path, commits):
    contributors = set()
    if contributors_path.exists():
        for line in read_file(contributors_path).splitlines():
            author, _, _ = line.strip().partition(' (')
            authors = author.split('/')
            contributors.update(map(str.casefold, authors))

    new_contributors = set()
    for commit in commits:
        for author in commit.authors:
            author_folded = author.casefold()
            if author_folded not in contributors:
                contributors.add(author_folded)
                new_contributors.add(author)

    return sorted(new_contributors, key=str.casefold)


def create_changelog(args):
    logging.basicConfig(
        datefmt='%Y-%m-%d %H-%M-%S', format='{asctime} | {levelname:<8} | {message}',
        level=logging.WARNING - 10 * args.verbosity, style='{', stream=sys.stderr)

    commits = CommitRange(None, args.commitish, args.default_author)

    if not args.no_override:
        if args.override_path.exists():
            overrides = json.loads(read_file(args.override_path))
            commits.apply_overrides(overrides)
        else:
            logger.warning(f'File {args.override_path.as_posix()} does not exist')

    logger.info(f'Loaded {len(commits)} commits')

    new_contributors = get_new_contributors(args.contributors_path, commits)
    if new_contributors:
        if args.contributors:
            write_file(args.contributors_path, '\n'.join(new_contributors) + '\n', mode='a')
        logger.info(f'New contributors: {", ".join(new_contributors)}')

    return Changelog(commits.groups(), args.repo, args.collapsible)


def create_parser():
    import argparse

    parser = argparse.ArgumentParser(
        description='Create a changelog markdown from a git commit range')
    parser.add_argument(
        'commitish', default='HEAD', nargs='?',
        help='The commitish to create the range from (default: %(default)s)')
    parser.add_argument(
        '-v', '--verbosity', action='count', default=0,
        help='increase verbosity (can be used twice)')
    parser.add_argument(
        '-c', '--contributors', action='store_true',
        help='update CONTRIBUTORS file (default: %(default)s)')
    parser.add_argument(
        '--contributors-path', type=Path, default=LOCATION_PATH.parent / 'CONTRIBUTORS',
        help='path to the CONTRIBUTORS file')
    parser.add_argument(
        '--no-override', action='store_true',
        help='skip override json in commit generation (default: %(default)s)')
    parser.add_argument(
        '--override-path', type=Path, default=LOCATION_PATH / 'changelog_override.json',
        help='path to the changelog_override.json file')
    parser.add_argument(
        '--default-author', default='pukkandan',
        help='the author to use without an author indicator (default: %(default)s)')
    parser.add_argument(
        '--repo', default='yt-dlp/yt-dlp',
        help='the github repository to use for the operations (default: %(default)s)')
    parser.add_argument(
        '--collapsible', action='store_true',
        help='make changelog collapsible (default: %(default)s)')

    return parser


if __name__ == '__main__':
    print(create_changelog(create_parser().parse_args()))
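A minimal sketch of how the changelog generator above might be driven programmatically; the module path devscripts.make_changelog and the example commitish/flags are illustrative only, not taken from the diff.

# Illustrative sketch: build and print a changelog for everything since the
# given commitish, mirroring the __main__ block above (run from a yt-dlp checkout).
from devscripts.make_changelog import create_changelog, create_parser

args = create_parser().parse_args(['-vv', '--collapsible', 'HEAD'])
print(create_changelog(args))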
@@ -1,7 +1,5 @@
 #!/usr/bin/env python3
-from __future__ import unicode_literals
-
-import io
 import optparse
 import re
 
@@ -16,7 +14,7 @@ def main():
 
     infile, outfile = args
 
-    with io.open(infile, encoding='utf-8') as inf:
+    with open(infile, encoding='utf-8') as inf:
         readme = inf.read()
 
     bug_text = re.search(
@@ -26,7 +24,7 @@ def main():
 
     out = bug_text + dev_text
 
-    with io.open(outfile, 'w', encoding='utf-8') as outf:
+    with open(outfile, 'w', encoding='utf-8') as outf:
         outf.write(out)
@@ -1,29 +1,72 @@
|
|||||||
#!/usr/bin/env python3
|
#!/usr/bin/env python3
|
||||||
from __future__ import unicode_literals
|
|
||||||
|
|
||||||
import io
|
# Allow direct execution
|
||||||
import optparse
|
import os
|
||||||
|
import sys
|
||||||
|
|
||||||
|
sys.path.insert(0, os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
|
||||||
|
|
||||||
|
|
||||||
|
import re
|
||||||
|
|
||||||
|
from devscripts.utils import get_filename_args, read_file, write_file
|
||||||
|
|
||||||
|
VERBOSE_TMPL = '''
|
||||||
|
- type: checkboxes
|
||||||
|
id: verbose
|
||||||
|
attributes:
|
||||||
|
label: Provide verbose output that clearly demonstrates the problem
|
||||||
|
options:
|
||||||
|
- label: Run **your** yt-dlp command with **-vU** flag added (`yt-dlp -vU <your command line>`)
|
||||||
|
required: true
|
||||||
|
- label: "If using API, add `'verbose': True` to `YoutubeDL` params instead"
|
||||||
|
required: false
|
||||||
|
- label: Copy the WHOLE output (starting with `[debug] Command-line config`) and insert it below
|
||||||
|
required: true
|
||||||
|
- type: textarea
|
||||||
|
id: log
|
||||||
|
attributes:
|
||||||
|
label: Complete Verbose Output
|
||||||
|
description: |
|
||||||
|
It should start like this:
|
||||||
|
placeholder: |
|
||||||
|
[debug] Command-line config: ['-vU', 'https://www.youtube.com/watch?v=BaW_jenozKc']
|
||||||
|
[debug] Encodings: locale cp65001, fs utf-8, pref cp65001, out utf-8, error utf-8, screen utf-8
|
||||||
|
[debug] yt-dlp version nightly@... from yt-dlp/yt-dlp [b634ba742] (win_exe)
|
||||||
|
[debug] Python 3.8.10 (CPython 64bit) - Windows-10-10.0.22000-SP0
|
||||||
|
[debug] exe versions: ffmpeg N-106550-g072101bd52-20220410 (fdk,setts), ffprobe N-106624-g391ce570c8-20220415, phantomjs 2.1.1
|
||||||
|
[debug] Optional libraries: Cryptodome-3.15.0, brotli-1.0.9, certifi-2022.06.15, mutagen-1.45.1, sqlite3-2.6.0, websockets-10.3
|
||||||
|
[debug] Proxy map: {}
|
||||||
|
[debug] Request Handlers: urllib, requests
|
||||||
|
[debug] Loaded 1893 extractors
|
||||||
|
[debug] Fetching release info: https://api.github.com/repos/yt-dlp/yt-dlp-nightly-builds/releases/latest
|
||||||
|
yt-dlp is up to date (nightly@... from yt-dlp/yt-dlp-nightly-builds)
|
||||||
|
[youtube] Extracting URL: https://www.youtube.com/watch?v=BaW_jenozKc
|
||||||
|
<more lines>
|
||||||
|
render: shell
|
||||||
|
validations:
|
||||||
|
required: true
|
||||||
|
'''.strip()
|
||||||
|
|
||||||
|
NO_SKIP = '''
|
||||||
|
- type: checkboxes
|
||||||
|
attributes:
|
||||||
|
label: DO NOT REMOVE OR SKIP THE ISSUE TEMPLATE
|
||||||
|
description: Fill all fields even if you think it is irrelevant for the issue
|
||||||
|
options:
|
||||||
|
- label: I understand that I will be **blocked** if I *intentionally* remove or skip any mandatory\\* field
|
||||||
|
required: true
|
||||||
|
'''.strip()
|
||||||
|
|
||||||
|
|
||||||
def main():
|
def main():
|
||||||
parser = optparse.OptionParser(usage='%prog INFILE OUTFILE')
|
fields = {'no_skip': NO_SKIP}
|
||||||
options, args = parser.parse_args()
|
fields['verbose'] = VERBOSE_TMPL % fields
|
||||||
if len(args) != 2:
|
fields['verbose_optional'] = re.sub(r'(\n\s+validations:)?\n\s+required: true', '', fields['verbose'])
|
||||||
parser.error('Expected an input and an output filename')
|
|
||||||
|
|
||||||
infile, outfile = args
|
infile, outfile = get_filename_args(has_infile=True)
|
||||||
|
write_file(outfile, read_file(infile) % fields)
|
||||||
|
|
||||||
with io.open(infile, encoding='utf-8') as inf:
|
|
||||||
issue_template_tmpl = inf.read()
|
|
||||||
|
|
||||||
# Get the version from yt_dlp/version.py without importing the package
|
|
||||||
exec(compile(open('yt_dlp/version.py').read(),
|
|
||||||
'yt_dlp/version.py', 'exec'))
|
|
||||||
|
|
||||||
out = issue_template_tmpl % {'version': locals()['__version__']}
|
|
||||||
|
|
||||||
with io.open(outfile, 'w', encoding='utf-8') as outf:
|
|
||||||
outf.write(out)
|
|
||||||
|
|
||||||
if __name__ == '__main__':
|
if __name__ == '__main__':
|
||||||
main()
|
main()
|
||||||
|
|||||||
@@ -1,105 +1,132 @@
|
|||||||
#!/usr/bin/env python3
|
#!/usr/bin/env python3
|
||||||
from __future__ import unicode_literals, print_function
|
|
||||||
|
|
||||||
from inspect import getsource
|
# Allow direct execution
|
||||||
import io
|
|
||||||
import os
|
import os
|
||||||
from os.path import dirname as dirn
|
import shutil
|
||||||
import sys
|
import sys
|
||||||
|
|
||||||
sys.path.insert(0, dirn(dirn((os.path.abspath(__file__)))))
|
sys.path.insert(0, os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
|
||||||
|
|
||||||
lazy_extractors_filename = sys.argv[1] if len(sys.argv) > 1 else 'yt_dlp/extractor/lazy_extractors.py'
|
|
||||||
if os.path.exists(lazy_extractors_filename):
|
|
||||||
os.remove(lazy_extractors_filename)
|
|
||||||
|
|
||||||
# Block plugins from loading
|
from inspect import getsource
|
||||||
plugins_dirname = 'ytdlp_plugins'
|
|
||||||
plugins_blocked_dirname = 'ytdlp_plugins_blocked'
|
|
||||||
if os.path.exists(plugins_dirname):
|
|
||||||
os.rename(plugins_dirname, plugins_blocked_dirname)
|
|
||||||
|
|
||||||
from yt_dlp.extractor import _ALL_CLASSES
|
from devscripts.utils import get_filename_args, read_file, write_file
|
||||||
from yt_dlp.extractor.common import InfoExtractor, SearchInfoExtractor
|
|
||||||
|
|
||||||
if os.path.exists(plugins_blocked_dirname):
|
NO_ATTR = object()
|
||||||
os.rename(plugins_blocked_dirname, plugins_dirname)
|
STATIC_CLASS_PROPERTIES = [
|
||||||
|
'IE_NAME', '_ENABLED', '_VALID_URL', # Used for URL matching
|
||||||
with open('devscripts/lazy_load_template.py', 'rt') as f:
|
'_WORKING', 'IE_DESC', '_NETRC_MACHINE', 'SEARCH_KEY', # Used for --extractor-descriptions
|
||||||
module_template = f.read()
|
'age_limit', # Used for --age-limit (evaluated)
|
||||||
|
'_RETURN_TYPE', # Accessed in CLI only with instance (evaluated)
|
||||||
CLASS_PROPERTIES = ['ie_key', 'working', '_match_valid_url', 'suitable', '_match_id', 'get_temp_id']
|
]
|
||||||
module_contents = [
|
CLASS_METHODS = [
|
||||||
module_template,
|
'ie_key', 'suitable', '_match_valid_url', # Used for URL matching
|
||||||
*[getsource(getattr(InfoExtractor, k)) for k in CLASS_PROPERTIES],
|
'working', 'get_temp_id', '_match_id', # Accessed just before instance creation
|
||||||
'\nclass LazyLoadSearchExtractor(LazyLoadExtractor):\n pass\n']
|
'description', # Used for --extractor-descriptions
|
||||||
|
'is_suitable', # Used for --age-limit
|
||||||
ie_template = '''
|
'supports_login', 'is_single_video', # Accessed in CLI only with instance
|
||||||
|
]
|
||||||
|
IE_TEMPLATE = '''
|
||||||
class {name}({bases}):
|
class {name}({bases}):
|
||||||
_module = '{module}'
|
_module = {module!r}
|
||||||
'''
|
'''
|
||||||
|
MODULE_TEMPLATE = read_file('devscripts/lazy_load_template.py')
|
||||||
|
|
||||||
|
|
||||||
def get_base_name(base):
|
def main():
|
||||||
if base is InfoExtractor:
|
lazy_extractors_filename = get_filename_args(default_outfile='yt_dlp/extractor/lazy_extractors.py')
|
||||||
return 'LazyLoadExtractor'
|
if os.path.exists(lazy_extractors_filename):
|
||||||
elif base is SearchInfoExtractor:
|
os.remove(lazy_extractors_filename)
|
||||||
return 'LazyLoadSearchExtractor'
|
|
||||||
else:
|
_ALL_CLASSES = get_all_ies() # Must be before import
|
||||||
return base.__name__
|
|
||||||
|
import yt_dlp.plugins
|
||||||
|
from yt_dlp.extractor.common import InfoExtractor, SearchInfoExtractor
|
||||||
|
|
||||||
|
# Filter out plugins
|
||||||
|
_ALL_CLASSES = [cls for cls in _ALL_CLASSES if not cls.__module__.startswith(f'{yt_dlp.plugins.PACKAGE_NAME}.')]
|
||||||
|
|
||||||
|
DummyInfoExtractor = type('InfoExtractor', (InfoExtractor,), {'IE_NAME': NO_ATTR})
|
||||||
|
module_src = '\n'.join((
|
||||||
|
MODULE_TEMPLATE,
|
||||||
|
' _module = None',
|
||||||
|
*extra_ie_code(DummyInfoExtractor),
|
||||||
|
'\nclass LazyLoadSearchExtractor(LazyLoadExtractor):\n pass\n',
|
||||||
|
*build_ies(_ALL_CLASSES, (InfoExtractor, SearchInfoExtractor), DummyInfoExtractor),
|
||||||
|
))
|
||||||
|
|
||||||
|
write_file(lazy_extractors_filename, f'{module_src}\n')
|
||||||
|
|
||||||
|
|
||||||
def build_lazy_ie(ie, name):
|
def get_all_ies():
|
||||||
s = ie_template.format(
|
PLUGINS_DIRNAME = 'ytdlp_plugins'
|
||||||
name=name,
|
BLOCKED_DIRNAME = f'{PLUGINS_DIRNAME}_blocked'
|
||||||
bases=', '.join(map(get_base_name, ie.__bases__)),
|
if os.path.exists(PLUGINS_DIRNAME):
|
||||||
module=ie.__module__)
|
# os.rename cannot be used, e.g. in Docker. See https://github.com/yt-dlp/yt-dlp/pull/4958
|
||||||
valid_url = getattr(ie, '_VALID_URL', None)
|
shutil.move(PLUGINS_DIRNAME, BLOCKED_DIRNAME)
|
||||||
if not valid_url and hasattr(ie, '_make_valid_url'):
|
try:
|
||||||
valid_url = ie._make_valid_url()
|
from yt_dlp.extractor.extractors import _ALL_CLASSES
|
||||||
if valid_url:
|
finally:
|
||||||
s += f' _VALID_URL = {valid_url!r}\n'
|
if os.path.exists(BLOCKED_DIRNAME):
|
||||||
if not ie._WORKING:
|
shutil.move(BLOCKED_DIRNAME, PLUGINS_DIRNAME)
|
||||||
s += ' _WORKING = False\n'
|
return _ALL_CLASSES
|
||||||
if ie.suitable.__func__ is not InfoExtractor.suitable.__func__:
|
|
||||||
s += f'\n{getsource(ie.suitable)}'
|
|
||||||
return s
|
|
||||||
|
|
||||||
|
|
||||||
# find the correct sorting and add the required base classes so that subclasses
|
def extra_ie_code(ie, base=None):
|
||||||
# can be correctly created
|
for var in STATIC_CLASS_PROPERTIES:
|
||||||
classes = _ALL_CLASSES[:-1]
|
val = getattr(ie, var)
|
||||||
ordered_cls = []
|
if val != (getattr(base, var) if base else NO_ATTR):
|
||||||
while classes:
|
yield f' {var} = {val!r}'
|
||||||
for c in classes[:]:
|
yield ''
|
||||||
bases = set(c.__bases__) - set((object, InfoExtractor, SearchInfoExtractor))
|
|
||||||
stop = False
|
|
||||||
for b in bases:
|
|
||||||
if b not in classes and b not in ordered_cls:
|
|
||||||
if b.__name__ == 'GenericIE':
|
|
||||||
exit()
|
|
||||||
classes.insert(0, b)
|
|
||||||
stop = True
|
|
||||||
if stop:
|
|
||||||
break
|
|
||||||
if all(b in ordered_cls for b in bases):
|
|
||||||
ordered_cls.append(c)
|
|
||||||
classes.remove(c)
|
|
||||||
break
|
|
||||||
ordered_cls.append(_ALL_CLASSES[-1])
|
|
||||||
|
|
||||||
names = []
|
for name in CLASS_METHODS:
|
||||||
for ie in ordered_cls:
|
f = getattr(ie, name)
|
||||||
name = ie.__name__
|
if not base or f.__func__ != getattr(base, name).__func__:
|
||||||
src = build_lazy_ie(ie, name)
|
yield getsource(f)
|
||||||
module_contents.append(src)
|
|
||||||
if ie in _ALL_CLASSES:
|
|
||||||
names.append(name)
|
|
||||||
|
|
||||||
module_contents.append(
|
|
||||||
'\n_ALL_CLASSES = [{0}]'.format(', '.join(names)))
|
|
||||||
|
|
||||||
module_src = '\n'.join(module_contents) + '\n'
|
def build_ies(ies, bases, attr_base):
|
||||||
|
names = []
|
||||||
|
for ie in sort_ies(ies, bases):
|
||||||
|
yield build_lazy_ie(ie, ie.__name__, attr_base)
|
||||||
|
if ie in ies:
|
||||||
|
names.append(ie.__name__)
|
||||||
|
|
||||||
with io.open(lazy_extractors_filename, 'wt', encoding='utf-8') as f:
|
yield f'\n_ALL_CLASSES = [{", ".join(names)}]'
|
||||||
f.write(module_src)
|
|
||||||
|
|
||||||
|
def sort_ies(ies, ignored_bases):
|
||||||
|
"""find the correct sorting and add the required base classes so that subclasses can be correctly created"""
|
||||||
|
classes, returned_classes = ies[:-1], set()
|
||||||
|
assert ies[-1].__name__ == 'GenericIE', 'Last IE must be GenericIE'
|
||||||
|
while classes:
|
||||||
|
for c in classes[:]:
|
||||||
|
bases = set(c.__bases__) - {object, *ignored_bases}
|
||||||
|
restart = False
|
||||||
|
for b in sorted(bases, key=lambda x: x.__name__):
|
||||||
|
if b not in classes and b not in returned_classes:
|
||||||
|
assert b.__name__ != 'GenericIE', 'Cannot inherit from GenericIE'
|
||||||
|
classes.insert(0, b)
|
||||||
|
restart = True
|
||||||
|
if restart:
|
||||||
|
break
|
||||||
|
if bases <= returned_classes:
|
||||||
|
yield c
|
||||||
|
returned_classes.add(c)
|
||||||
|
classes.remove(c)
|
||||||
|
break
|
||||||
|
yield ies[-1]
|
||||||
|
|
||||||
|
|
||||||
|
def build_lazy_ie(ie, name, attr_base):
|
||||||
|
bases = ', '.join({
|
||||||
|
'InfoExtractor': 'LazyLoadExtractor',
|
||||||
|
'SearchInfoExtractor': 'LazyLoadSearchExtractor',
|
||||||
|
}.get(base.__name__, base.__name__) for base in ie.__bases__)
|
||||||
|
|
||||||
|
s = IE_TEMPLATE.format(name=name, module=ie.__module__, bases=bases)
|
||||||
|
return s + '\n'.join(extra_ie_code(ie, attr_base))
|
||||||
|
|
||||||
|
|
||||||
|
if __name__ == '__main__':
|
||||||
|
main()
|
||||||
|
|||||||
@@ -1,31 +1,93 @@
|
|||||||
#!/usr/bin/env python3
|
#!/usr/bin/env python3
|
||||||
|
|
||||||
# yt-dlp --help | make_readme.py
|
"""
|
||||||
# This must be run in a console of correct width
|
yt-dlp --help | make_readme.py
|
||||||
|
This must be run in a console of correct width
|
||||||
|
"""
|
||||||
|
|
||||||
from __future__ import unicode_literals
|
# Allow direct execution
|
||||||
|
import os
|
||||||
import io
|
|
||||||
import sys
|
import sys
|
||||||
|
|
||||||
|
sys.path.insert(0, os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
|
||||||
|
|
||||||
|
|
||||||
|
import functools
|
||||||
import re
|
import re
|
||||||
|
|
||||||
|
from devscripts.utils import read_file, write_file
|
||||||
|
|
||||||
README_FILE = 'README.md'
|
README_FILE = 'README.md'
|
||||||
helptext = sys.stdin.read()
|
|
||||||
|
|
||||||
if isinstance(helptext, bytes):
|
OPTIONS_START = 'General Options:'
|
||||||
helptext = helptext.decode('utf-8')
|
OPTIONS_END = 'CONFIGURATION'
|
||||||
|
EPILOG_START = 'See full documentation'
|
||||||
|
ALLOWED_OVERSHOOT = 2
|
||||||
|
|
||||||
with io.open(README_FILE, encoding='utf-8') as f:
|
DISABLE_PATCH = object()
|
||||||
oldreadme = f.read()
|
|
||||||
|
|
||||||
header = oldreadme[:oldreadme.index('## General Options:')]
|
|
||||||
footer = oldreadme[oldreadme.index('# CONFIGURATION'):]
|
|
||||||
|
|
||||||
options = helptext[helptext.index(' General Options:'):]
|
def take_section(text, start=None, end=None, *, shift=0):
|
||||||
options = re.sub(r'(?m)^ (\w.+)$', r'## \1', options)
|
return text[
|
||||||
options = options + '\n'
|
text.index(start) + shift if start else None:
|
||||||
|
text.index(end) + shift if end else None
|
||||||
|
]
|
||||||
|
|
||||||
with io.open(README_FILE, 'w', encoding='utf-8') as f:
|
|
||||||
f.write(header)
|
def apply_patch(text, patch):
|
||||||
f.write(options)
|
return text if patch[0] is DISABLE_PATCH else re.sub(*patch, text)
|
||||||
f.write(footer)
|
|
||||||
|
|
||||||
|
options = take_section(sys.stdin.read(), f'\n {OPTIONS_START}', f'\n{EPILOG_START}', shift=1)
|
||||||
|
|
||||||
|
max_width = max(map(len, options.split('\n')))
|
||||||
|
switch_col_width = len(re.search(r'(?m)^\s{5,}', options).group())
|
||||||
|
delim = f'\n{" " * switch_col_width}'
|
||||||
|
|
||||||
|
PATCHES = (
|
||||||
|
( # Standardize `--update` message
|
||||||
|
r'(?m)^( -U, --update\s+).+(\n \s.+)*$',
|
||||||
|
r'\1Update this program to the latest version',
|
||||||
|
),
|
||||||
|
( # Headings
|
||||||
|
r'(?m)^ (\w.+\n)( (?=\w))?',
|
||||||
|
r'## \1'
|
||||||
|
),
|
||||||
|
( # Fixup `--date` formatting
|
||||||
|
rf'(?m)( --date DATE.+({delim}[^\[]+)*)\[.+({delim}.+)*$',
|
||||||
|
(rf'\1[now|today|yesterday][-N[day|week|month|year]].{delim}'
|
||||||
|
f'E.g. "--date today-2weeks" downloads only{delim}'
|
||||||
|
'videos uploaded on the same day two weeks ago'),
|
||||||
|
),
|
||||||
|
( # Do not split URLs
|
||||||
|
rf'({delim[:-1]})? (?P<label>\[\S+\] )?(?P<url>https?({delim})?:({delim})?/({delim})?/(({delim})?\S+)+)\s',
|
||||||
|
lambda mobj: ''.join((delim, mobj.group('label') or '', re.sub(r'\s+', '', mobj.group('url')), '\n'))
|
||||||
|
),
|
||||||
|
( # Do not split "words"
|
||||||
|
rf'(?m)({delim}\S+)+$',
|
||||||
|
lambda mobj: ''.join((delim, mobj.group(0).replace(delim, '')))
|
||||||
|
),
|
||||||
|
( # Allow overshooting last line
|
||||||
|
rf'(?m)^(?P<prev>.+)${delim}(?P<current>.+)$(?!{delim})',
|
||||||
|
lambda mobj: (mobj.group().replace(delim, ' ')
|
||||||
|
if len(mobj.group()) - len(delim) + 1 <= max_width + ALLOWED_OVERSHOOT
|
||||||
|
else mobj.group())
|
||||||
|
),
|
||||||
|
( # Avoid newline when a space is available b/w switch and description
|
||||||
|
DISABLE_PATCH, # This creates issues with prepare_manpage
|
||||||
|
r'(?m)^(\s{4}-.{%d})(%s)' % (switch_col_width - 6, delim),
|
||||||
|
r'\1 '
|
||||||
|
),
|
||||||
|
( # Replace brackets with a Markdown link
|
||||||
|
r'SponsorBlock API \((http.+)\)',
|
||||||
|
r'[SponsorBlock API](\1)'
|
||||||
|
),
|
||||||
|
)
|
||||||
|
|
||||||
|
readme = read_file(README_FILE)
|
||||||
|
|
||||||
|
write_file(README_FILE, ''.join((
|
||||||
|
take_section(readme, end=f'## {OPTIONS_START}'),
|
||||||
|
functools.reduce(apply_patch, PATCHES, options),
|
||||||
|
take_section(readme, f'# {OPTIONS_END}'),
|
||||||
|
)))
|
||||||
|
|||||||
@@ -1,47 +1,19 @@
|
|||||||
#!/usr/bin/env python3
|
#!/usr/bin/env python3
|
||||||
from __future__ import unicode_literals
|
|
||||||
|
|
||||||
import io
|
# Allow direct execution
|
||||||
import optparse
|
|
||||||
import os
|
import os
|
||||||
import sys
|
import sys
|
||||||
|
|
||||||
|
sys.path.insert(0, os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
|
||||||
|
|
||||||
# Import yt_dlp
|
|
||||||
ROOT_DIR = os.path.join(os.path.dirname(__file__), '..')
|
from devscripts.utils import get_filename_args, write_file
|
||||||
sys.path.insert(0, ROOT_DIR)
|
from yt_dlp.extractor import list_extractor_classes
|
||||||
import yt_dlp
|
|
||||||
|
|
||||||
|
|
||||||
def main():
|
def main():
|
||||||
parser = optparse.OptionParser(usage='%prog OUTFILE.md')
|
out = '\n'.join(ie.description() for ie in list_extractor_classes() if ie.IE_DESC is not False)
|
||||||
options, args = parser.parse_args()
|
write_file(get_filename_args(), f'# Supported sites\n{out}\n')
|
||||||
if len(args) != 1:
|
|
||||||
parser.error('Expected an output filename')
|
|
||||||
|
|
||||||
outfile, = args
|
|
||||||
|
|
||||||
def gen_ies_md(ies):
|
|
||||||
for ie in ies:
|
|
||||||
ie_md = '**{0}**'.format(ie.IE_NAME)
|
|
||||||
if ie.IE_DESC is False:
|
|
||||||
continue
|
|
||||||
if ie.IE_DESC is not None:
|
|
||||||
ie_md += ': {0}'.format(ie.IE_DESC)
|
|
||||||
search_key = getattr(ie, 'SEARCH_KEY', None)
|
|
||||||
if search_key is not None:
|
|
||||||
ie_md += f'; "{ie.SEARCH_KEY}:" prefix'
|
|
||||||
if not ie.working():
|
|
||||||
ie_md += ' (Currently broken)'
|
|
||||||
yield ie_md
|
|
||||||
|
|
||||||
ies = sorted(yt_dlp.gen_extractors(), key=lambda i: i.IE_NAME.lower())
|
|
||||||
out = '# Supported sites\n' + ''.join(
|
|
||||||
' - ' + md + '\n'
|
|
||||||
for md in gen_ies_md(ies))
|
|
||||||
|
|
||||||
with io.open(outfile, 'w', encoding='utf-8') as outf:
|
|
||||||
outf.write(out)
|
|
||||||
|
|
||||||
|
|
||||||
if __name__ == '__main__':
|
if __name__ == '__main__':
|
||||||
|
|||||||
@@ -1,6 +0,0 @@
-# source this file in your shell to get a POSIX locale (which will break many programs, but that's kind of the point)
-
-export LC_ALL=POSIX
-export LANG=POSIX
-export LANGUAGE=POSIX
@@ -1,11 +1,22 @@
|
|||||||
#!/usr/bin/env python3
|
#!/usr/bin/env python3
|
||||||
from __future__ import unicode_literals
|
|
||||||
|
|
||||||
import io
|
# Allow direct execution
|
||||||
import optparse
|
import os
|
||||||
|
import sys
|
||||||
|
|
||||||
|
sys.path.insert(0, os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
|
||||||
|
|
||||||
|
|
||||||
import os.path
|
import os.path
|
||||||
import re
|
import re
|
||||||
|
|
||||||
|
from devscripts.utils import (
|
||||||
|
compose_functions,
|
||||||
|
get_filename_args,
|
||||||
|
read_file,
|
||||||
|
write_file,
|
||||||
|
)
|
||||||
|
|
||||||
ROOT_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
|
ROOT_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
|
||||||
README_FILE = os.path.join(ROOT_DIR, 'README.md')
|
README_FILE = os.path.join(ROOT_DIR, 'README.md')
|
||||||
|
|
||||||
@@ -13,7 +24,7 @@ PREFIX = r'''%yt-dlp(1)
|
|||||||
|
|
||||||
# NAME
|
# NAME
|
||||||
|
|
||||||
yt\-dlp \- A youtube-dl fork with additional features and patches
|
yt\-dlp \- A feature\-rich command\-line audio/video downloader
|
||||||
|
|
||||||
# SYNOPSIS
|
# SYNOPSIS
|
||||||
|
|
||||||
@@ -24,25 +35,6 @@ yt\-dlp \- A youtube-dl fork with additional features and patches
|
|||||||
'''
|
'''
|
||||||
|
|
||||||
|
|
||||||
def main():
|
|
||||||
parser = optparse.OptionParser(usage='%prog OUTFILE.md')
|
|
||||||
options, args = parser.parse_args()
|
|
||||||
if len(args) != 1:
|
|
||||||
parser.error('Expected an output filename')
|
|
||||||
|
|
||||||
outfile, = args
|
|
||||||
|
|
||||||
with io.open(README_FILE, encoding='utf-8') as f:
|
|
||||||
readme = f.read()
|
|
||||||
|
|
||||||
readme = filter_excluded_sections(readme)
|
|
||||||
readme = move_sections(readme)
|
|
||||||
readme = filter_options(readme)
|
|
||||||
|
|
||||||
with io.open(outfile, 'w', encoding='utf-8') as outf:
|
|
||||||
outf.write(PREFIX + readme)
|
|
||||||
|
|
||||||
|
|
||||||
def filter_excluded_sections(readme):
|
def filter_excluded_sections(readme):
|
||||||
EXCLUDED_SECTION_BEGIN_STRING = re.escape('<!-- MANPAGE: BEGIN EXCLUDED SECTION -->')
|
EXCLUDED_SECTION_BEGIN_STRING = re.escape('<!-- MANPAGE: BEGIN EXCLUDED SECTION -->')
|
||||||
EXCLUDED_SECTION_END_STRING = re.escape('<!-- MANPAGE: END EXCLUDED SECTION -->')
|
EXCLUDED_SECTION_END_STRING = re.escape('<!-- MANPAGE: END EXCLUDED SECTION -->')
|
||||||
@@ -51,6 +43,27 @@ def filter_excluded_sections(readme):
|
|||||||
'', readme)
|
'', readme)
|
||||||
|
|
||||||
|
|
||||||
|
def _convert_code_blocks(readme):
|
||||||
|
current_code_block = None
|
||||||
|
|
||||||
|
for line in readme.splitlines(True):
|
||||||
|
if current_code_block:
|
||||||
|
if line == current_code_block:
|
||||||
|
current_code_block = None
|
||||||
|
yield '\n'
|
||||||
|
else:
|
||||||
|
yield f' {line}'
|
||||||
|
elif line.startswith('```'):
|
||||||
|
current_code_block = line.count('`') * '`' + '\n'
|
||||||
|
yield '\n'
|
||||||
|
else:
|
||||||
|
yield line
|
||||||
|
|
||||||
|
|
||||||
|
def convert_code_blocks(readme):
|
||||||
|
return ''.join(_convert_code_blocks(readme))
|
||||||
|
|
||||||
|
|
||||||
def move_sections(readme):
|
def move_sections(readme):
|
||||||
MOVE_TAG_TEMPLATE = '<!-- MANPAGE: MOVE "%s" SECTION HERE -->'
|
MOVE_TAG_TEMPLATE = '<!-- MANPAGE: MOVE "%s" SECTION HERE -->'
|
||||||
sections = re.findall(r'(?m)^%s$' % (
|
sections = re.findall(r'(?m)^%s$' % (
|
||||||
@@ -73,8 +86,10 @@ def move_sections(readme):
|
|||||||
|
|
||||||
def filter_options(readme):
|
def filter_options(readme):
|
||||||
section = re.search(r'(?sm)^# USAGE AND OPTIONS\n.+?(?=^# )', readme).group(0)
|
section = re.search(r'(?sm)^# USAGE AND OPTIONS\n.+?(?=^# )', readme).group(0)
|
||||||
|
section_new = section.replace('*', R'\*')
|
||||||
|
|
||||||
options = '# OPTIONS\n'
|
options = '# OPTIONS\n'
|
||||||
for line in section.split('\n')[1:]:
|
for line in section_new.split('\n')[1:]:
|
||||||
mobj = re.fullmatch(r'''(?x)
|
mobj = re.fullmatch(r'''(?x)
|
||||||
\s{4}(?P<opt>-(?:,\s|[^\s])+)
|
\s{4}(?P<opt>-(?:,\s|[^\s])+)
|
||||||
(?:\s(?P<meta>(?:[^\s]|\s(?!\s))+))?
|
(?:\s(?P<meta>(?:[^\s]|\s(?!\s))+))?
|
||||||
@@ -94,5 +109,12 @@ def filter_options(readme):
|
|||||||
return readme.replace(section, options, 1)
|
return readme.replace(section, options, 1)
|
||||||
|
|
||||||
|
|
||||||
|
TRANSFORM = compose_functions(filter_excluded_sections, convert_code_blocks, move_sections, filter_options)
|
||||||
|
|
||||||
|
|
||||||
|
def main():
|
||||||
|
write_file(get_filename_args(), PREFIX + TRANSFORM(read_file(README_FILE)))
|
||||||
|
|
||||||
|
|
||||||
if __name__ == '__main__':
|
if __name__ == '__main__':
|
||||||
main()
|
main()
|
||||||
|
|||||||
@@ -1,143 +0,0 @@
|
|||||||
# Unused
|
|
||||||
|
|
||||||
#!/bin/bash
|
|
||||||
|
|
||||||
# IMPORTANT: the following assumptions are made
|
|
||||||
# * the GH repo is on the origin remote
|
|
||||||
# * the gh-pages branch is named so locally
|
|
||||||
# * the git config user.signingkey is properly set
|
|
||||||
|
|
||||||
# You will need
|
|
||||||
# pip install coverage nose rsa wheel
|
|
||||||
|
|
||||||
# TODO
|
|
||||||
# release notes
|
|
||||||
# make hash on local files
|
|
||||||
|
|
||||||
set -e
|
|
||||||
|
|
||||||
skip_tests=true
|
|
||||||
gpg_sign_commits=""
|
|
||||||
buildserver='localhost:8142'
|
|
||||||
|
|
||||||
while true
|
|
||||||
do
|
|
||||||
case "$1" in
|
|
||||||
--run-tests)
|
|
||||||
skip_tests=false
|
|
||||||
shift
|
|
||||||
;;
|
|
||||||
--gpg-sign-commits|-S)
|
|
||||||
gpg_sign_commits="-S"
|
|
||||||
shift
|
|
||||||
;;
|
|
||||||
--buildserver)
|
|
||||||
buildserver="$2"
|
|
||||||
shift 2
|
|
||||||
;;
|
|
||||||
--*)
|
|
||||||
echo "ERROR: unknown option $1"
|
|
||||||
exit 1
|
|
||||||
;;
|
|
||||||
*)
|
|
||||||
break
|
|
||||||
;;
|
|
||||||
esac
|
|
||||||
done
|
|
||||||
|
|
||||||
if [ -z "$1" ]; then echo "ERROR: specify version number like this: $0 1994.09.06"; exit 1; fi
|
|
||||||
version="$1"
|
|
||||||
major_version=$(echo "$version" | sed -n 's#^\([0-9]*\.[0-9]*\.[0-9]*\).*#\1#p')
|
|
||||||
if test "$major_version" '!=' "$(date '+%Y.%m.%d')"; then
|
|
||||||
echo "$version does not start with today's date!"
|
|
||||||
exit 1
|
|
||||||
fi
|
|
||||||
|
|
||||||
if [ ! -z "`git tag | grep "$version"`" ]; then echo 'ERROR: version already present'; exit 1; fi
|
|
||||||
if [ ! -z "`git status --porcelain | grep -v CHANGELOG`" ]; then echo 'ERROR: the working directory is not clean; commit or stash changes'; exit 1; fi
|
|
||||||
useless_files=$(find yt_dlp -type f -not -name '*.py')
|
|
||||||
if [ ! -z "$useless_files" ]; then echo "ERROR: Non-.py files in yt_dlp: $useless_files"; exit 1; fi
|
|
||||||
if [ ! -f "updates_key.pem" ]; then echo 'ERROR: updates_key.pem missing'; exit 1; fi
|
|
||||||
if ! type pandoc >/dev/null 2>/dev/null; then echo 'ERROR: pandoc is missing'; exit 1; fi
|
|
||||||
if ! python3 -c 'import rsa' 2>/dev/null; then echo 'ERROR: python3-rsa is missing'; exit 1; fi
|
|
||||||
if ! python3 -c 'import wheel' 2>/dev/null; then echo 'ERROR: wheel is missing'; exit 1; fi
|
|
||||||
|
|
||||||
read -p "Is Changelog up to date? (y/n) " -n 1
|
|
||||||
if [[ ! $REPLY =~ ^[Yy]$ ]]; then exit 1; fi
|
|
||||||
|
|
||||||
/bin/echo -e "\n### First of all, testing..."
|
|
||||||
make clean
|
|
||||||
if $skip_tests ; then
|
|
||||||
echo 'SKIPPING TESTS'
|
|
||||||
else
|
|
||||||
nosetests --verbose --with-coverage --cover-package=yt_dlp --cover-html test --stop || exit 1
|
|
||||||
fi
|
|
||||||
|
|
||||||
/bin/echo -e "\n### Changing version in version.py..."
|
|
||||||
sed -i "s/__version__ = '.*'/__version__ = '$version'/" yt_dlp/version.py
|
|
||||||
|
|
||||||
/bin/echo -e "\n### Changing version in Changelog..."
|
|
||||||
sed -i "s/<unreleased>/$version/" Changelog.md
|
|
||||||
|
|
||||||
/bin/echo -e "\n### Committing documentation, templates and yt_dlp/version.py..."
|
|
||||||
make README.md CONTRIBUTING.md issuetemplates supportedsites
|
|
||||||
git add README.md CONTRIBUTING.md .github/ISSUE_TEMPLATE/1_broken_site.md .github/ISSUE_TEMPLATE/2_site_support_request.md .github/ISSUE_TEMPLATE/3_site_feature_request.md .github/ISSUE_TEMPLATE/4_bug_report.md .github/ISSUE_TEMPLATE/5_feature_request.md .github/ISSUE_TEMPLATE/6_question.md docs/supportedsites.md yt_dlp/version.py Changelog.md
|
|
||||||
git commit $gpg_sign_commits -m "release $version"
|
|
||||||
|
|
||||||
/bin/echo -e "\n### Now tagging, signing and pushing..."
|
|
||||||
git tag -s -m "Release $version" "$version"
|
|
||||||
git show "$version"
|
|
||||||
read -p "Is it good, can I push? (y/n) " -n 1
|
|
||||||
if [[ ! $REPLY =~ ^[Yy]$ ]]; then exit 1; fi
|
|
||||||
echo
|
|
||||||
MASTER=$(git rev-parse --abbrev-ref HEAD)
|
|
||||||
git push origin $MASTER:master
|
|
||||||
git push origin "$version"
|
|
||||||
|
|
||||||
/bin/echo -e "\n### OK, now it is time to build the binaries..."
|
|
||||||
REV=$(git rev-parse HEAD)
|
|
||||||
make yt-dlp yt-dlp.tar.gz
|
|
||||||
read -p "VM running? (y/n) " -n 1
|
|
||||||
wget "http://$buildserver/build/ytdl-org/youtube-dl/yt-dlp.exe?rev=$REV" -O yt-dlp.exe
|
|
||||||
mkdir -p "build/$version"
|
|
||||||
mv yt-dlp yt-dlp.exe "build/$version"
|
|
||||||
mv yt-dlp.tar.gz "build/$version/yt-dlp-$version.tar.gz"
|
|
||||||
RELEASE_FILES="yt-dlp yt-dlp.exe yt-dlp-$version.tar.gz"
|
|
||||||
(cd build/$version/ && md5sum $RELEASE_FILES > MD5SUMS)
|
|
||||||
(cd build/$version/ && sha1sum $RELEASE_FILES > SHA1SUMS)
|
|
||||||
(cd build/$version/ && sha256sum $RELEASE_FILES > SHA2-256SUMS)
|
|
||||||
(cd build/$version/ && sha512sum $RELEASE_FILES > SHA2-512SUMS)
|
|
||||||
|
|
||||||
/bin/echo -e "\n### Signing and uploading the new binaries to GitHub..."
|
|
||||||
for f in $RELEASE_FILES; do gpg --passphrase-repeat 5 --detach-sig "build/$version/$f"; done
|
|
||||||
|
|
||||||
ROOT=$(pwd)
|
|
||||||
python devscripts/create-github-release.py Changelog.md $version "$ROOT/build/$version"
|
|
||||||
|
|
||||||
ssh ytdl@yt-dl.org "sh html/update_latest.sh $version"
|
|
||||||
|
|
||||||
/bin/echo -e "\n### Now switching to gh-pages..."
|
|
||||||
git clone --branch gh-pages --single-branch . build/gh-pages
|
|
||||||
(
|
|
||||||
set -e
|
|
||||||
ORIGIN_URL=$(git config --get remote.origin.url)
|
|
||||||
cd build/gh-pages
|
|
||||||
"$ROOT/devscripts/gh-pages/add-version.py" $version
|
|
||||||
"$ROOT/devscripts/gh-pages/update-feed.py"
|
|
||||||
"$ROOT/devscripts/gh-pages/sign-versions.py" < "$ROOT/updates_key.pem"
|
|
||||||
"$ROOT/devscripts/gh-pages/generate-download.py"
|
|
||||||
"$ROOT/devscripts/gh-pages/update-copyright.py"
|
|
||||||
"$ROOT/devscripts/gh-pages/update-sites.py"
|
|
||||||
git add *.html *.html.in update
|
|
||||||
git commit $gpg_sign_commits -m "release $version"
|
|
||||||
git push "$ROOT" gh-pages
|
|
||||||
git push "$ORIGIN_URL" gh-pages
|
|
||||||
)
|
|
||||||
rm -rf build
|
|
||||||
|
|
||||||
make pypi-files
|
|
||||||
echo "Uploading to PyPi ..."
|
|
||||||
python setup.py sdist bdist_wheel upload
|
|
||||||
make clean
|
|
||||||
|
|
||||||
/bin/echo -e "\n### DONE!"
|
|
||||||
@@ -1,16 +0,0 @@
-@setlocal
-@echo off
-cd /d %~dp0..
-
-if ["%~1"]==[""] (
-    set "test_set="test""
-) else if ["%~1"]==["core"] (
-    set "test_set="-m not download""
-) else if ["%~1"]==["download"] (
-    set "test_set="-m "download""
-) else (
-    echo.Invalid test type "%~1". Use "core" ^| "download"
-    exit /b 1
-)
-
-pytest %test_set%
devscripts/run_tests.py (new executable file, 75 lines)

#!/usr/bin/env python3

import argparse
import functools
import os
import re
import shlex
import subprocess
import sys
from pathlib import Path


fix_test_name = functools.partial(re.compile(r'IE(_all|_\d+)?$').sub, r'\1')


def parse_args():
    parser = argparse.ArgumentParser(description='Run selected yt-dlp tests')
    parser.add_argument(
        'test', help='an extractor test, or one of "core" or "download"', nargs='*')
    parser.add_argument(
        '-k', help='run a test matching EXPRESSION. Same as "pytest -k"', metavar='EXPRESSION')
    parser.add_argument(
        '--pytest-args', help='arguments to passthrough to pytest')
    return parser.parse_args()


def run_tests(*tests, pattern=None, ci=False):
    run_core = 'core' in tests or (not pattern and not tests)
    run_download = 'download' in tests
    tests = list(map(fix_test_name, tests))

    pytest_args = args.pytest_args or os.getenv('HATCH_TEST_ARGS', '')
    arguments = ['pytest', '-Werror', '--tb=short', *shlex.split(pytest_args)]
    if ci:
        arguments.append('--color=yes')
    if pattern:
        arguments.extend(['-k', pattern])
    if run_core:
        arguments.extend(['-m', 'not download'])
    elif run_download:
        arguments.extend(['-m', 'download'])
    else:
        arguments.extend(
            f'test/test_download.py::TestDownload::test_{test}' for test in tests)

    print(f'Running {arguments}', flush=True)
    try:
        return subprocess.call(arguments)
    except FileNotFoundError:
        pass

    arguments = [sys.executable, '-Werror', '-m', 'unittest']
    if pattern:
        arguments.extend(['-k', pattern])
    if run_core:
        print('"pytest" needs to be installed to run core tests', file=sys.stderr, flush=True)
        return 1
    elif run_download:
        arguments.append('test.test_download')
    else:
        arguments.extend(
            f'test.test_download.TestDownload.test_{test}' for test in tests)

    print(f'Running {arguments}', flush=True)
    return subprocess.call(arguments)


if __name__ == '__main__':
    try:
        args = parse_args()

        os.chdir(Path(__file__).parent.parent)
        sys.exit(run_tests(*args.test, pattern=args.k, ci=bool(os.getenv('CI'))))
    except KeyboardInterrupt:
        pass
@@ -1,14 +0,0 @@
-#!/bin/sh
-
-if [ -z $1 ]; then
-    test_set='test'
-elif [ $1 = 'core' ]; then
-    test_set="-m not download"
-elif [ $1 = 'download' ]; then
-    test_set="-m download"
-else
-    echo 'Invalid test type "'$1'". Use "core" | "download"'
-    exit 1
-fi
-
-python3 -m pytest "$test_set"
devscripts/set-variant.py (new file, 36 lines)

#!/usr/bin/env python3

# Allow direct execution
import os
import sys

sys.path.insert(0, os.path.dirname(os.path.dirname(os.path.abspath(__file__))))


import argparse
import functools
import re

from devscripts.utils import compose_functions, read_file, write_file

VERSION_FILE = 'yt_dlp/version.py'


def parse_options():
    parser = argparse.ArgumentParser(description='Set the build variant of the package')
    parser.add_argument('variant', help='Name of the variant')
    parser.add_argument('-M', '--update-message', default=None, help='Message to show in -U')
    return parser.parse_args()


def property_setter(name, value):
    return functools.partial(re.sub, rf'(?m)^{name}\s*=\s*.+$', f'{name} = {value!r}')


opts = parse_options()
transform = compose_functions(
    property_setter('VARIANT', opts.variant),
    property_setter('UPDATE_HINT', opts.update_message)
)

write_file(VERSION_FILE, transform(read_file(VERSION_FILE)))
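As a rough illustration of what property_setter above does, the returned partial rewrites the whole assignment line for the given name; the sample input string below is invented for this sketch and is not part of the repository.

import functools
import re

def property_setter(name, value):  # same helper as in the script above
    return functools.partial(re.sub, rf'(?m)^{name}\s*=\s*.+$', f'{name} = {value!r}')

src = "VARIANT = None\nUPDATE_HINT = None\n"
print(property_setter('VARIANT', 'pip')(src))
# -> VARIANT = 'pip'
#    UPDATE_HINT = None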
@@ -1,49 +0,0 @@
|
|||||||
# Unused
|
|
||||||
|
|
||||||
#!/usr/bin/env python3
|
|
||||||
from __future__ import unicode_literals
|
|
||||||
|
|
||||||
import itertools
|
|
||||||
import json
|
|
||||||
import os
|
|
||||||
import re
|
|
||||||
import sys
|
|
||||||
|
|
||||||
sys.path.insert(0, os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
|
|
||||||
|
|
||||||
from yt_dlp.compat import (
|
|
||||||
compat_print,
|
|
||||||
compat_urllib_request,
|
|
||||||
)
|
|
||||||
from yt_dlp.utils import format_bytes
|
|
||||||
|
|
||||||
|
|
||||||
def format_size(bytes):
|
|
||||||
return '%s (%d bytes)' % (format_bytes(bytes), bytes)
|
|
||||||
|
|
||||||
|
|
||||||
total_bytes = 0
|
|
||||||
|
|
||||||
for page in itertools.count(1):
|
|
||||||
releases = json.loads(compat_urllib_request.urlopen(
|
|
||||||
'https://api.github.com/repos/ytdl-org/youtube-dl/releases?page=%s' % page
|
|
||||||
).read().decode('utf-8'))
|
|
||||||
|
|
||||||
if not releases:
|
|
||||||
break
|
|
||||||
|
|
||||||
for release in releases:
|
|
||||||
compat_print(release['name'])
|
|
||||||
for asset in release['assets']:
|
|
||||||
asset_name = asset['name']
|
|
||||||
total_bytes += asset['download_count'] * asset['size']
|
|
||||||
if all(not re.match(p, asset_name) for p in (
|
|
||||||
r'^yt-dlp$',
|
|
||||||
r'^yt-dlp-\d{4}\.\d{2}\.\d{2}(?:\.\d+)?\.tar\.gz$',
|
|
||||||
r'^yt-dlp\.exe$')):
|
|
||||||
continue
|
|
||||||
compat_print(
|
|
||||||
' %s size: %s downloads: %d'
|
|
||||||
% (asset_name, format_size(asset['size']), asset['download_count']))
|
|
||||||
|
|
||||||
compat_print('total downloads traffic: %s' % format_size(total_bytes))
|
|
||||||
devscripts/tomlparse.py (new executable file, 189 lines)

#!/usr/bin/env python3

"""
Simple parser for spec compliant toml files

A simple toml parser for files that comply with the spec.
Should only be used to parse `pyproject.toml` for `install_deps.py`.

IMPORTANT: INVALID FILES OR MULTILINE STRINGS ARE NOT SUPPORTED!
"""

from __future__ import annotations

import datetime as dt
import json
import re

WS = r'(?:[\ \t]*)'
STRING_RE = re.compile(r'"(?:\\.|[^\\"\n])*"|\'[^\'\n]*\'')
SINGLE_KEY_RE = re.compile(rf'{STRING_RE.pattern}|[A-Za-z0-9_-]+')
KEY_RE = re.compile(rf'{WS}(?:{SINGLE_KEY_RE.pattern}){WS}(?:\.{WS}(?:{SINGLE_KEY_RE.pattern}){WS})*')
EQUALS_RE = re.compile(rf'={WS}')
WS_RE = re.compile(WS)

_SUBTABLE = rf'(?P<subtable>^\[(?P<is_list>\[)?(?P<path>{KEY_RE.pattern})\]\]?)'
EXPRESSION_RE = re.compile(rf'^(?:{_SUBTABLE}|{KEY_RE.pattern}=)', re.MULTILINE)

LIST_WS_RE = re.compile(rf'{WS}((#[^\n]*)?\n{WS})*')
LEFTOVER_VALUE_RE = re.compile(r'[^,}\]\t\n#]+')


def parse_key(value: str):
    for match in SINGLE_KEY_RE.finditer(value):
        if match[0][0] == '"':
            yield json.loads(match[0])
        elif match[0][0] == '\'':
            yield match[0][1:-1]
        else:
            yield match[0]


def get_target(root: dict, paths: list[str], is_list=False):
    target = root

    for index, key in enumerate(paths, 1):
        use_list = is_list and index == len(paths)
        result = target.get(key)
        if result is None:
            result = [] if use_list else {}
            target[key] = result

        if isinstance(result, dict):
            target = result
        elif use_list:
            target = {}
            result.append(target)
        else:
            target = result[-1]

    assert isinstance(target, dict)
    return target


def parse_enclosed(data: str, index: int, end: str, ws_re: re.Pattern):
    index += 1

    if match := ws_re.match(data, index):
        index = match.end()

    while data[index] != end:
        index = yield True, index

        if match := ws_re.match(data, index):
            index = match.end()

        if data[index] == ',':
            index += 1

        if match := ws_re.match(data, index):
            index = match.end()

    assert data[index] == end
    yield False, index + 1


def parse_value(data: str, index: int):
    if data[index] == '[':
        result = []

        indices = parse_enclosed(data, index, ']', LIST_WS_RE)
        valid, index = next(indices)
        while valid:
            index, value = parse_value(data, index)
            result.append(value)
            valid, index = indices.send(index)

        return index, result

    if data[index] == '{':
        result = {}

        indices = parse_enclosed(data, index, '}', WS_RE)
        valid, index = next(indices)
        while valid:
            valid, index = indices.send(parse_kv_pair(data, index, result))

        return index, result

    if match := STRING_RE.match(data, index):
        return match.end(), json.loads(match[0]) if match[0][0] == '"' else match[0][1:-1]

    match = LEFTOVER_VALUE_RE.match(data, index)
    assert match
    value = match[0].strip()
    for func in [
        int,
        float,
        dt.time.fromisoformat,
        dt.date.fromisoformat,
        dt.datetime.fromisoformat,
        {'true': True, 'false': False}.get,
    ]:
        try:
            value = func(value)
            break
        except Exception:
            pass

    return match.end(), value


def parse_kv_pair(data: str, index: int, target: dict):
    match = KEY_RE.match(data, index)
    if not match:
        return None

    *keys, key = parse_key(match[0])

    match = EQUALS_RE.match(data, match.end())
    assert match
    index = match.end()

    index, value = parse_value(data, index)
    get_target(target, keys)[key] = value
    return index


def parse_toml(data: str):
    root = {}
    target = root

    index = 0
    while True:
        match = EXPRESSION_RE.search(data, index)
        if not match:
            break

        if match.group('subtable'):
            index = match.end()
            path, is_list = match.group('path', 'is_list')
            target = get_target(root, list(parse_key(path)), bool(is_list))
            continue

        index = parse_kv_pair(data, match.start(), target)
        assert index is not None

    return root


def main():
    import argparse
    from pathlib import Path

    parser = argparse.ArgumentParser()
    parser.add_argument('infile', type=Path, help='The TOML file to read as input')
    args = parser.parse_args()

    with args.infile.open('r', encoding='utf-8') as file:
        data = file.read()

    def default(obj):
        if isinstance(obj, (dt.date, dt.time, dt.datetime)):
            return obj.isoformat()

    print(json.dumps(parse_toml(data), default=default))


if __name__ == '__main__':
    main()
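A small usage sketch for the parser above; the inline TOML snippet is made up for illustration, and the import assumes the file sits at devscripts/tomlparse.py as in this diff.

from devscripts.tomlparse import parse_toml

data = parse_toml('[project]\nname = "yt-dlp"\nrequires-python = ">=3.8"\n')
assert data == {'project': {'name': 'yt-dlp', 'requires-python': '>=3.8'}}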
@@ -1,37 +0,0 @@
-#!/usr/bin/env python3
-from __future__ import unicode_literals
-
-import json
-import os
-import re
-import sys
-
-sys.path.insert(0, os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
-
-from yt_dlp.compat import compat_urllib_request
-
-
-# usage: python3 ./devscripts/update-formulae.py <path-to-formulae-rb> <version>
-# version can be either 0-aligned (yt-dlp version) or normalized (PyPI version)
-
-filename, version = sys.argv[1:]
-
-normalized_version = '.'.join(str(int(x)) for x in version.split('.'))
-
-pypi_release = json.loads(compat_urllib_request.urlopen(
-    'https://pypi.org/pypi/yt-dlp/%s/json' % normalized_version
-).read().decode('utf-8'))
-
-tarball_file = next(x for x in pypi_release['urls'] if x['filename'].endswith('.tar.gz'))
-
-sha256sum = tarball_file['digests']['sha256']
-url = tarball_file['url']
-
-with open(filename, 'r') as r:
-    formulae_text = r.read()
-
-formulae_text = re.sub(r'sha256 "[0-9a-f]*?"', 'sha256 "%s"' % sha256sum, formulae_text)
-formulae_text = re.sub(r'url "[^"]*?"', 'url "%s"' % url, formulae_text)
-
-with open(filename, 'w') as w:
-    w.write(formulae_text)
@@ -1,42 +1,82 @@
|
|||||||
#!/usr/bin/env python3
|
#!/usr/bin/env python3
|
||||||
from datetime import datetime
|
|
||||||
|
# Allow direct execution
|
||||||
|
import os
|
||||||
import sys
|
import sys
|
||||||
import subprocess
|
|
||||||
|
sys.path.insert(0, os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
|
||||||
|
|
||||||
|
|
||||||
with open('yt_dlp/version.py', 'rt') as f:
|
import argparse
|
||||||
exec(compile(f.read(), 'yt_dlp/version.py', 'exec'))
|
import contextlib
|
||||||
old_version = locals()['__version__']
|
import datetime as dt
|
||||||
|
import sys
|
||||||
|
|
||||||
old_version_list = old_version.split('.')
|
from devscripts.utils import read_version, run_process, write_file
|
||||||
|
|
||||||
old_ver = '.'.join(old_version_list[:3])
|
|
||||||
old_rev = old_version_list[3] if len(old_version_list) > 3 else ''
|
|
||||||
|
|
||||||
ver = datetime.utcnow().strftime("%Y.%m.%d")
|
def get_new_version(version, revision):
|
||||||
|
if not version:
|
||||||
|
version = dt.datetime.now(dt.timezone.utc).strftime('%Y.%m.%d')
|
||||||
|
|
||||||
rev = (sys.argv[1:] or [''])[0] # Use first argument, if present as revision number
|
if revision:
|
||||||
if not rev:
|
assert revision.isdecimal(), 'Revision must be a number'
|
||||||
rev = str(int(old_rev or 0) + 1) if old_ver == ver else ''
|
else:
|
||||||
|
old_version = read_version().split('.')
|
||||||
|
if version.split('.') == old_version[:3]:
|
||||||
|
revision = str(int((old_version + [0])[3]) + 1)
|
||||||
|
|
||||||
VERSION = '.'.join((ver, rev)) if rev else ver
|
return f'{version}.{revision}' if revision else version
|
||||||
|
|
||||||
try:
|
|
||||||
sp = subprocess.Popen(['git', 'rev-parse', '--short', 'HEAD'], stdout=subprocess.PIPE)
|
|
||||||
GIT_HEAD = sp.communicate()[0].decode().strip() or None
|
|
||||||
except Exception:
|
|
||||||
GIT_HEAD = None
|
|
||||||
|
|
||||||
VERSION_FILE = f'''\
|
def get_git_head():
|
||||||
|
with contextlib.suppress(Exception):
|
||||||
|
return run_process('git', 'rev-parse', 'HEAD').stdout.strip()
|
||||||
|
|
||||||
|
|
||||||
|
VERSION_TEMPLATE = '''\
|
||||||
# Autogenerated by devscripts/update-version.py
|
# Autogenerated by devscripts/update-version.py
|
||||||
|
|
||||||
__version__ = {VERSION!r}
|
__version__ = {version!r}
|
||||||
|
|
||||||
RELEASE_GIT_HEAD = {GIT_HEAD!r}
|
RELEASE_GIT_HEAD = {git_head!r}
|
||||||
|
|
||||||
|
VARIANT = None
|
||||||
|
|
||||||
|
UPDATE_HINT = None
|
||||||
|
|
||||||
|
CHANNEL = {channel!r}
|
||||||
|
|
||||||
|
ORIGIN = {origin!r}
|
||||||
|
|
||||||
|
_pkg_version = {package_version!r}
|
||||||
'''
|
'''
|
||||||
|
|
||||||
with open('yt_dlp/version.py', 'wt') as f:
|
if __name__ == '__main__':
|
||||||
f.write(VERSION_FILE)
|
parser = argparse.ArgumentParser(description='Update the version.py file')
|
||||||
|
parser.add_argument(
|
||||||
|
'-c', '--channel', default='stable',
|
||||||
|
help='Select update channel (default: %(default)s)')
|
||||||
|
parser.add_argument(
|
||||||
|
'-r', '--origin', default='local',
|
||||||
|
help='Select origin/repository (default: %(default)s)')
|
||||||
|
parser.add_argument(
|
||||||
|
'-s', '--suffix', default='',
|
||||||
|
help='Add an alphanumeric suffix to the package version, e.g. "dev"')
|
||||||
|
parser.add_argument(
|
||||||
|
'-o', '--output', default='yt_dlp/version.py',
|
||||||
|
help='The output file to write to (default: %(default)s)')
|
||||||
|
parser.add_argument(
|
||||||
|
'version', nargs='?', default=None,
|
||||||
|
help='A version or revision to use instead of generating one')
|
||||||
|
args = parser.parse_args()
|
||||||
|
|
||||||
print('::set-output name=ytdlp_version::' + VERSION)
|
git_head = get_git_head()
|
||||||
print(f'\nVersion = {VERSION}, Git HEAD = {GIT_HEAD}')
|
version = (
|
||||||
|
args.version if args.version and '.' in args.version
|
||||||
|
else get_new_version(None, args.version))
|
||||||
|
write_file(args.output, VERSION_TEMPLATE.format(
|
||||||
|
version=version, git_head=git_head, channel=args.channel, origin=args.origin,
|
||||||
|
package_version=f'{version}{args.suffix}'))
|
||||||
|
|
||||||
|
print(f'version={version} ({args.channel}), head={git_head}')
|
||||||
|
|||||||
devscripts/update_changelog.py (new executable file, 26 lines)

#!/usr/bin/env python3

# Allow direct execution
import os
import sys

sys.path.insert(0, os.path.dirname(os.path.dirname(os.path.abspath(__file__))))

from pathlib import Path

from devscripts.make_changelog import create_changelog, create_parser
from devscripts.utils import read_file, read_version, write_file

# Always run after devscripts/update-version.py, and run before `make doc|pypi-files|tar|all`

if __name__ == '__main__':
    parser = create_parser()
    parser.description = 'Update an existing changelog file with an entry for a new release'
    parser.add_argument(
        '--changelog-path', type=Path, default=Path(__file__).parent.parent / 'Changelog.md',
        help='path to the Changelog file')
    args = parser.parse_args()
    new_entry = create_changelog(args)

    header, sep, changelog = read_file(args.changelog_path).partition('\n### ')
    write_file(args.changelog_path, f'{header}{sep}{read_version()}\n{new_entry}\n{sep}{changelog}')
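The header/entry splice performed above can be illustrated with a toy changelog string; the version numbers and entries here are only examples.

header, sep, changelog = '# Changelog\n\n### 2024.04.09\n- older entry\n'.partition('\n### ')
new_entry = '- newer entry'
print(f'{header}{sep}2024.05.26\n{new_entry}\n{sep}{changelog}')
# # Changelog
#
# ### 2024.05.26
# - newer entry
#
# ### 2024.04.09
# - older entry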
devscripts/utils.py (new file, 47 lines)

import argparse
import functools
import subprocess


def read_file(fname):
    with open(fname, encoding='utf-8') as f:
        return f.read()


def write_file(fname, content, mode='w'):
    with open(fname, mode, encoding='utf-8') as f:
        return f.write(content)


def read_version(fname='yt_dlp/version.py', varname='__version__'):
    """Get the version without importing the package"""
    items = {}
    exec(compile(read_file(fname), fname, 'exec'), items)
    return items[varname]


def get_filename_args(has_infile=False, default_outfile=None):
    parser = argparse.ArgumentParser()
    if has_infile:
        parser.add_argument('infile', help='Input file')
    kwargs = {'nargs': '?', 'default': default_outfile} if default_outfile else {}
    parser.add_argument('outfile', **kwargs, help='Output file')

    opts = parser.parse_args()
    if has_infile:
        return opts.infile, opts.outfile
    return opts.outfile


def compose_functions(*functions):
    return lambda x: functools.reduce(lambda y, f: f(y), functions, x)


def run_process(*args, **kwargs):
    kwargs.setdefault('text', True)
    kwargs.setdefault('check', True)
    kwargs.setdefault('capture_output', True)
    if kwargs['text']:
        kwargs.setdefault('encoding', 'utf-8')
        kwargs.setdefault('errors', 'replace')
    return subprocess.run(args, **kwargs)
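An illustrative use of the helpers above from a repository checkout; the printed values shown in the comments are examples only.

from devscripts.utils import compose_functions, read_version, run_process

print(read_version())                      # e.g. '2024.05.26'
print(run_process('git', 'rev-parse', '--short', 'HEAD').stdout.strip())  # current commit
normalize = compose_functions(str.strip, str.lower)
print(normalize('  YT-DLP  '))             # 'yt-dlp'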
@@ -1,58 +0,0 @@
|
|||||||
# UNUSED
|
|
||||||
|
|
||||||
#!/bin/bash
|
|
||||||
|
|
||||||
# Run with as parameter a setup.py that works in the current directory
|
|
||||||
# e.g. no os.chdir()
|
|
||||||
# It will run twice, the first time will crash
|
|
||||||
|
|
||||||
set -e
|
|
||||||
|
|
||||||
SCRIPT_DIR="$( cd "$( dirname "$0" )" && pwd )"
|
|
||||||
|
|
||||||
if [ ! -d wine-py2exe ]; then
|
|
||||||
|
|
||||||
sudo apt-get install wine1.3 axel bsdiff
|
|
||||||
|
|
||||||
mkdir wine-py2exe
|
|
||||||
cd wine-py2exe
|
|
||||||
export WINEPREFIX=`pwd`
|
|
||||||
|
|
||||||
axel -a "http://www.python.org/ftp/python/2.7/python-2.7.msi"
|
|
||||||
axel -a "http://downloads.sourceforge.net/project/py2exe/py2exe/0.6.9/py2exe-0.6.9.win32-py2.7.exe"
|
|
||||||
#axel -a "http://winetricks.org/winetricks"
|
|
||||||
|
|
||||||
# http://appdb.winehq.org/objectManager.php?sClass=version&iId=21957
|
|
||||||
echo "Follow python setup on screen"
|
|
||||||
wine msiexec /i python-2.7.msi
|
|
||||||
|
|
||||||
echo "Follow py2exe setup on screen"
|
|
||||||
wine py2exe-0.6.9.win32-py2.7.exe
|
|
||||||
|
|
||||||
#echo "Follow Microsoft Visual C++ 2008 Redistributable Package setup on screen"
|
|
||||||
#bash winetricks vcrun2008
|
|
||||||
|
|
||||||
rm py2exe-0.6.9.win32-py2.7.exe
|
|
||||||
rm python-2.7.msi
|
|
||||||
#rm winetricks
|
|
||||||
|
|
||||||
# http://bugs.winehq.org/show_bug.cgi?id=3591
|
|
||||||
|
|
||||||
mv drive_c/Python27/Lib/site-packages/py2exe/run.exe drive_c/Python27/Lib/site-packages/py2exe/run.exe.backup
|
|
||||||
bspatch drive_c/Python27/Lib/site-packages/py2exe/run.exe.backup drive_c/Python27/Lib/site-packages/py2exe/run.exe "$SCRIPT_DIR/SizeOfImage.patch"
|
|
||||||
mv drive_c/Python27/Lib/site-packages/py2exe/run_w.exe drive_c/Python27/Lib/site-packages/py2exe/run_w.exe.backup
|
|
||||||
bspatch drive_c/Python27/Lib/site-packages/py2exe/run_w.exe.backup drive_c/Python27/Lib/site-packages/py2exe/run_w.exe "$SCRIPT_DIR/SizeOfImage_w.patch"
|
|
||||||
|
|
||||||
cd -
|
|
||||||
|
|
||||||
else
|
|
||||||
|
|
||||||
export WINEPREFIX="$( cd wine-py2exe && pwd )"
|
|
||||||
|
|
||||||
fi
|
|
||||||
|
|
||||||
wine "C:\\Python27\\python.exe" "$1" py2exe > "py2exe.log" 2>&1 || true
|
|
||||||
echo '# Copying python27.dll' >> "py2exe.log"
|
|
||||||
cp "$WINEPREFIX/drive_c/windows/system32/python27.dll" build/bdist.win32/winexe/bundle-2.7/
|
|
||||||
wine "C:\\Python27\\python.exe" "$1" py2exe >> "py2exe.log" 2>&1
|
|
||||||
|
|
||||||
@@ -1,11 +1,12 @@
 #!/usr/bin/env python3
-from __future__ import unicode_literals
 
+# Allow direct execution
 import os
-from os.path import dirname as dirn
 import sys
 
-sys.path.insert(0, dirn(dirn((os.path.abspath(__file__)))))
+sys.path.insert(0, os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
+
 import yt_dlp
 
 ZSH_COMPLETION_FILE = "completions/zsh/_yt-dlp"
@@ -45,5 +46,5 @@ def build_completion(opt_parser):
         f.write(template)
 
 
-parser = yt_dlp.parseOpts()[0]
+parser = yt_dlp.parseOpts(ignore_config_files=True)[0]
 build_completion(parser)
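The functional change in this devscript is that the option parser is now built with ignore_config_files=True, so a user's own yt-dlp configuration can no longer leak into the generated zsh completion. A rough, illustrative sketch of what the script gets back from that call (the option iteration below is not copied from the script):

# Illustrative only: parseOpts(ignore_config_files=True)[0] is the call taken
# from the diff; enumerating option_list is just an example of using the parser.
import yt_dlp

parser = yt_dlp.parseOpts(ignore_config_files=True)[0]
print(len([opt.get_opt_string() for opt in parser.option_list]), 'top-level options')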
public.key (new file, 29 lines)
@@ -0,0 +1,29 @@
-----BEGIN PGP PUBLIC KEY BLOCK-----

mQINBGP78C4BEAD0rF9zjGPAt0thlt5C1ebzccAVX7Nb1v+eqQjk+WEZdTETVCg3
WAM5ngArlHdm/fZqzUgO+pAYrB60GKeg7ffUDf+S0XFKEZdeRLYeAaqqKhSibVal
DjvOBOztu3W607HLETQAqA7wTPuIt2WqmpL60NIcyr27LxqmgdN3mNvZ2iLO+bP0
nKR/C+PgE9H4ytywDa12zMx6PmZCnVOOOu6XZEFmdUxxdQ9fFDqd9LcBKY2LDOcS
Yo1saY0YWiZWHtzVoZu1kOzjnS5Fjq/yBHJLImDH7pNxHm7s/PnaurpmQFtDFruk
t+2lhDnpKUmGr/I/3IHqH/X+9nPoS4uiqQ5HpblB8BK+4WfpaiEg75LnvuOPfZIP
KYyXa/0A7QojMwgOrD88ozT+VCkKkkJ+ijXZ7gHNjmcBaUdKK7fDIEOYI63Lyc6Q
WkGQTigFffSUXWHDCO9aXNhP3ejqFWgGMtCUsrbkcJkWuWY7q5ARy/05HbSM3K4D
U9eqtnxmiV1WQ8nXuI9JgJQRvh5PTkny5LtxqzcmqvWO9TjHBbrs14BPEO9fcXxK
L/CFBbzXDSvvAgArdqqlMoncQ/yicTlfL6qzJ8EKFiqW14QMTdAn6SuuZTodXCTi
InwoT7WjjuFPKKdvfH1GP4bnqdzTnzLxCSDIEtfyfPsIX+9GI7Jkk/zZjQARAQAB
tDdTaW1vbiBTYXdpY2tpICh5dC1kbHAgc2lnbmluZyBrZXkpIDxjb250YWN0QGdy
dWI0ay54eXo+iQJOBBMBCgA4FiEErAy75oSNaoc0ZK9OV89lkztadYEFAmP78C4C
GwMFCwkIBwIGFQoJCAsCBBYCAwECHgECF4AACgkQV89lkztadYEVqQ//cW7TxhXg
7Xbh2EZQzXml0egn6j8QaV9KzGragMiShrlvTO2zXfLXqyizrFP4AspgjSn/4NrI
8mluom+Yi+qr7DXT4BjQqIM9y3AjwZPdywe912Lxcw52NNoPZCm24I9T7ySc8lmR
FQvZC0w4H/VTNj/2lgJ1dwMflpwvNRiWa5YzcFGlCUeDIPskLx9++AJE+xwU3LYm
jQQsPBqpHHiTBEJzMLl+rfd9Fg4N+QNzpFkTDW3EPerLuvJniSBBwZthqxeAtw4M
UiAXh6JvCc2hJkKCoygRfM281MeolvmsGNyQm+axlB0vyldiPP6BnaRgZlx+l6MU
cPqgHblb7RW5j9lfr6OYL7SceBIHNv0CFrt1OnkGo/tVMwcs8LH3Ae4a7UJlIceL
V54aRxSsZU7w4iX+PB79BWkEsQzwKrUuJVOeL4UDwWajp75OFaUqbS/slDDVXvK5
OIeuth3mA/adjdvgjPxhRQjA3l69rRWIJDrqBSHldmRsnX6cvXTDy8wSXZgy51lP
m4IVLHnCy9m4SaGGoAsfTZS0cC9FgjUIyTyrq9M67wOMpUxnuB0aRZgJE1DsI23E
qdvcSNVlO+39xM/KPWUEh6b83wMn88QeW+DCVGWACQq5N3YdPnAJa50617fGbY6I
gXIoRHXkDqe23PZ/jURYCv0sjVtjPoVC+bg=
=bJkn
-----END PGP PUBLIC KEY BLOCK-----
pyproject.toml (new file, 278 lines)
@@ -0,0 +1,278 @@
[build-system]
requires = ["hatchling"]
build-backend = "hatchling.build"

[project]
name = "yt-dlp"
maintainers = [
    {name = "pukkandan", email = "pukkandan.ytdlp@gmail.com"},
    {name = "Grub4K", email = "contact@grub4k.xyz"},
    {name = "bashonly", email = "bashonly@protonmail.com"},
    {name = "coletdjnz", email = "coletdjnz@protonmail.com"},
]
description = "A feature-rich command-line audio/video downloader"
readme = "README.md"
requires-python = ">=3.8"
keywords = [
    "youtube-dl",
    "video-downloader",
    "youtube-downloader",
    "sponsorblock",
    "youtube-dlc",
    "yt-dlp",
]
license = {file = "LICENSE"}
classifiers = [
    "Topic :: Multimedia :: Video",
    "Development Status :: 5 - Production/Stable",
    "Environment :: Console",
    "Programming Language :: Python",
    "Programming Language :: Python :: 3 :: Only",
    "Programming Language :: Python :: 3.8",
    "Programming Language :: Python :: 3.9",
    "Programming Language :: Python :: 3.10",
    "Programming Language :: Python :: 3.11",
    "Programming Language :: Python :: 3.12",
    "Programming Language :: Python :: Implementation",
    "Programming Language :: Python :: Implementation :: CPython",
    "Programming Language :: Python :: Implementation :: PyPy",
    "License :: OSI Approved :: The Unlicense (Unlicense)",
    "Operating System :: OS Independent",
]
dynamic = ["version"]
dependencies = [
    "brotli; implementation_name=='cpython'",
    "brotlicffi; implementation_name!='cpython'",
    "certifi",
    "mutagen",
    "pycryptodomex",
    "requests>=2.31.0,<3",
    "urllib3>=1.26.17,<3",
    "websockets>=12.0",
]

[project.optional-dependencies]
default = []
curl-cffi = ["curl-cffi==0.5.10; implementation_name=='cpython'"]
secretstorage = [
    "cffi",
    "secretstorage",
]
build = [
    "build",
    "hatchling",
    "pip",
    "setuptools>=66.1.0,<70",
    "wheel",
]
dev = [
    "pre-commit",
    "yt-dlp[static-analysis]",
    "yt-dlp[test]",
]
static-analysis = [
    "autopep8~=2.0",
    "ruff~=0.4.4",
]
test = [
    "pytest~=8.1",
]
pyinstaller = [
    "pyinstaller>=6.3; sys_platform!='darwin'",
    "pyinstaller==5.13.2; sys_platform=='darwin'",  # needed for curl_cffi
]
py2exe = [
    "py2exe>=0.12",
    "requests==2.31.*",
]

[project.urls]
Documentation = "https://github.com/yt-dlp/yt-dlp#readme"
Repository = "https://github.com/yt-dlp/yt-dlp"
Tracker = "https://github.com/yt-dlp/yt-dlp/issues"
Funding = "https://github.com/yt-dlp/yt-dlp/blob/master/Collaborators.md#collaborators"

[project.scripts]
yt-dlp = "yt_dlp:main"

[project.entry-points.pyinstaller40]
hook-dirs = "yt_dlp.__pyinstaller:get_hook_dirs"

[tool.hatch.build.targets.sdist]
include = [
    "/yt_dlp",
    "/devscripts",
    "/test",
    "/.gitignore",  # included by default, needed for auto-excludes
    "/Changelog.md",
    "/LICENSE",  # included as license
    "/pyproject.toml",  # included by default
    "/README.md",  # included as readme
    "/setup.cfg",
    "/supportedsites.md",
]
artifacts = [
    "/yt_dlp/extractor/lazy_extractors.py",
    "/completions",
    "/AUTHORS",  # included by default
    "/README.txt",
    "/yt-dlp.1",
]

[tool.hatch.build.targets.wheel]
packages = ["yt_dlp"]
artifacts = ["/yt_dlp/extractor/lazy_extractors.py"]

[tool.hatch.build.targets.wheel.shared-data]
"completions/bash/yt-dlp" = "share/bash-completion/completions/yt-dlp"
"completions/zsh/_yt-dlp" = "share/zsh/site-functions/_yt-dlp"
"completions/fish/yt-dlp.fish" = "share/fish/vendor_completions.d/yt-dlp.fish"
"README.txt" = "share/doc/yt_dlp/README.txt"
"yt-dlp.1" = "share/man/man1/yt-dlp.1"

[tool.hatch.version]
path = "yt_dlp/version.py"
pattern = "_pkg_version = '(?P<version>[^']+)'"

[tool.hatch.envs.default]
features = ["curl-cffi", "default"]
dependencies = ["pre-commit"]
path = ".venv"
installer = "uv"

[tool.hatch.envs.default.scripts]
setup = "pre-commit install --config .pre-commit-hatch.yaml"
yt-dlp = "python -Werror -Xdev -m yt_dlp {args}"

[tool.hatch.envs.hatch-static-analysis]
detached = true
features = ["static-analysis"]
dependencies = []  # override hatch ruff version
config-path = "pyproject.toml"

[tool.hatch.envs.hatch-static-analysis.scripts]
format-check = "autopep8 --diff {args:.}"
format-fix = "autopep8 --in-place {args:.}"
lint-check = "ruff check {args:.}"
lint-fix = "ruff check --fix {args:.}"

[tool.hatch.envs.hatch-test]
features = ["test"]
dependencies = [
    "pytest-randomly~=3.15",
    "pytest-rerunfailures~=14.0",
    "pytest-xdist[psutil]~=3.5",
]

[tool.hatch.envs.hatch-test.scripts]
run = "python -m devscripts.run_tests {args}"
run-cov = "echo Code coverage not implemented && exit 1"

[[tool.hatch.envs.hatch-test.matrix]]
python = [
    "3.8",
    "3.9",
    "3.10",
    "3.11",
    "3.12",
    "pypy3.8",
    "pypy3.9",
    "pypy3.10",
]

[tool.ruff]
line-length = 120

[tool.ruff.lint]
ignore = [
    "E402",  # module level import not at top of file
    "E501",  # line too long
    "E731",  # do not assign a lambda expression, use a def
    "E741",  # ambiguous variable name
]
select = [
    "E",  # pycodestyle errors
    "W",  # pycodestyle warnings
    "F",  # pyflakes
    "I",  # import order
]

[tool.ruff.lint.per-file-ignores]
"devscripts/lazy_load_template.py" = ["F401"]
"!yt_dlp/extractor/**.py" = ["I"]

[tool.ruff.lint.isort]
known-first-party = [
    "bundle",
    "devscripts",
    "test",
]
relative-imports-order = "closest-to-furthest"

[tool.autopep8]
max_line_length = 120
recursive = true
exit-code = true
jobs = 0
select = [
    "E101",
    "E112",
    "E113",
    "E115",
    "E116",
    "E117",
    "E121",
    "E122",
    "E123",
    "E124",
    "E125",
    "E126",
    "E127",
    "E128",
    "E129",
    "E131",
    "E201",
    "E202",
    "E203",
    "E211",
    "E221",
    "E222",
    "E223",
    "E224",
    "E225",
    "E226",
    "E227",
    "E228",
    "E231",
    "E241",
    "E242",
    "E251",
    "E252",
    "E261",
    "E262",
    "E265",
    "E266",
    "E271",
    "E272",
    "E273",
    "E274",
    "E275",
    "E301",
    "E302",
    "E303",
    "E304",
    "E305",
    "E306",
    "E502",
    "E701",
    "E702",
    "E704",
    "W391",
    "W504",
]

[tool.pytest.ini_options]
addopts = "-ra -v --strict-markers"
markers = [
    "download",
]
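Among the new build metadata, [tool.hatch.version] is what replaces the old exec()-based version lookup in setup.py: hatchling extracts the version from yt_dlp/version.py with a regex instead of importing the package. A minimal sketch of what that lookup amounts to, reusing the exact path and pattern values from the table above (reading the file by hand here is only an illustration):

# Sketch of the regex version source configured in [tool.hatch.version]
import re
from pathlib import Path

PATTERN = r"_pkg_version = '(?P<version>[^']+)'"
match = re.search(PATTERN, Path('yt_dlp/version.py').read_text(encoding='utf-8'))
if match:
    print(match.group('version'))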
@@ -1,4 +0,0 @@
-[pytest]
-addopts = -ra -v --strict-markers
-markers =
-    download
@@ -1,6 +0,0 @@
-mutagen
-pycryptodomex
-websockets
-brotli; platform_python_implementation=='CPython'
-brotlicffi; platform_python_implementation!='CPython'
-certifi
setup.cfg (43 changed lines)
@@ -1,6 +1,39 @@
-[wheel]
-universal = True
-
 [flake8]
-exclude = yt_dlp/extractor/__init__.py,devscripts/buildserver.py,devscripts/lazy_load_template.py,devscripts/make_issue_template.py,setup.py,build,.git,venv,devscripts/create-github-release.py,devscripts/release.sh,devscripts/show-downloads-statistics.py
+exclude = build,venv,.tox,.git,.pytest_cache
 ignore = E402,E501,E731,E741,W503
+max_line_length = 120
+per_file_ignores =
+    devscripts/lazy_load_template.py: F401
+
+
+[autoflake]
+ignore-init-module-imports = true
+ignore-pass-after-docstring = true
+remove-all-unused-imports = true
+remove-duplicate-keys = true
+remove-unused-variables = true
+
+
+[tox:tox]
+skipsdist = true
+envlist = py{38,39,310,311,312},pypy{38,39,310}
+skip_missing_interpreters = true
+
+[testenv]  # tox
+deps =
+    pytest
+commands = pytest {posargs:"-m not download"}
+passenv = HOME  # For test_compat_expanduser
+setenv =
+    # PYTHONWARNINGS = error  # Catches PIP's warnings too
+
+
+[isort]
+py_version = 38
+multi_line_output = VERTICAL_HANGING_INDENT
+line_length = 80
+reverse_relative = true
+ensure_newline_before_comments = true
+include_trailing_comma = true
+known_first_party =
+    test
setup.py (deleted, 142 lines)
@@ -1,142 +0,0 @@
#!/usr/bin/env python3
# coding: utf-8
import os.path
import warnings
import sys

try:
    from setuptools import setup, Command, find_packages
    setuptools_available = True
except ImportError:
    from distutils.core import setup, Command
    setuptools_available = False
from distutils.spawn import spawn

# Get the version from yt_dlp/version.py without importing the package
exec(compile(open('yt_dlp/version.py').read(), 'yt_dlp/version.py', 'exec'))


DESCRIPTION = 'A youtube-dl fork with additional features and patches'

LONG_DESCRIPTION = '\n\n'.join((
    'Official repository: <https://github.com/yt-dlp/yt-dlp>',
    '**PS**: Some links in this document will not work since this is a copy of the README.md from Github',
    open('README.md', encoding='utf-8').read()))

REQUIREMENTS = open('requirements.txt', encoding='utf-8').read().splitlines()


if sys.argv[1:2] == ['py2exe']:
    import py2exe
    warnings.warn(
        'py2exe builds do not support pycryptodomex and needs VC++14 to run. '
        'The recommended way is to use "pyinst.py" to build using pyinstaller')
    params = {
        'console': [{
            'script': './yt_dlp/__main__.py',
            'dest_base': 'yt-dlp',
            'version': __version__,
            'description': DESCRIPTION,
            'comments': LONG_DESCRIPTION.split('\n')[0],
            'product_name': 'yt-dlp',
            'product_version': __version__,
        }],
        'options': {
            'py2exe': {
                'bundle_files': 0,
                'compressed': 1,
                'optimize': 2,
                'dist_dir': './dist',
                'excludes': ['Crypto', 'Cryptodome'],  # py2exe cannot import Crypto
                'dll_excludes': ['w9xpopen.exe', 'crypt32.dll'],
            }
        },
        'zipfile': None
    }

else:
    files_spec = [
        ('share/bash-completion/completions', ['completions/bash/yt-dlp']),
        ('share/zsh/site-functions', ['completions/zsh/_yt-dlp']),
        ('share/fish/vendor_completions.d', ['completions/fish/yt-dlp.fish']),
        ('share/doc/yt_dlp', ['README.txt']),
        ('share/man/man1', ['yt-dlp.1'])
    ]
    root = os.path.dirname(os.path.abspath(__file__))
    data_files = []
    for dirname, files in files_spec:
        resfiles = []
        for fn in files:
            if not os.path.exists(fn):
                warnings.warn('Skipping file %s since it is not present. Try running `make pypi-files` first' % fn)
            else:
                resfiles.append(fn)
        data_files.append((dirname, resfiles))

    params = {
        'data_files': data_files,
    }

    if setuptools_available:
        params['entry_points'] = {'console_scripts': ['yt-dlp = yt_dlp:main']}
    else:
        params['scripts'] = ['yt-dlp']


class build_lazy_extractors(Command):
    description = 'Build the extractor lazy loading module'
    user_options = []

    def initialize_options(self):
        pass

    def finalize_options(self):
        pass

    def run(self):
        spawn([sys.executable, 'devscripts/make_lazy_extractors.py', 'yt_dlp/extractor/lazy_extractors.py'],
              dry_run=self.dry_run)


if setuptools_available:
    packages = find_packages(exclude=('youtube_dl', 'youtube_dlc', 'test', 'ytdlp_plugins'))
else:
    packages = ['yt_dlp', 'yt_dlp.downloader', 'yt_dlp.extractor', 'yt_dlp.postprocessor']


setup(
    name='yt-dlp',
    version=__version__,
    maintainer='pukkandan',
    maintainer_email='pukkandan.ytdlp@gmail.com',
    description=DESCRIPTION,
    long_description=LONG_DESCRIPTION,
    long_description_content_type='text/markdown',
    url='https://github.com/yt-dlp/yt-dlp',
    packages=packages,
    install_requires=REQUIREMENTS,
    project_urls={
        'Documentation': 'https://yt-dlp.readthedocs.io',
        'Source': 'https://github.com/yt-dlp/yt-dlp',
        'Tracker': 'https://github.com/yt-dlp/yt-dlp/issues',
        'Funding': 'https://github.com/yt-dlp/yt-dlp/blob/master/Collaborators.md#collaborators',
    },
    classifiers=[
        'Topic :: Multimedia :: Video',
        'Development Status :: 5 - Production/Stable',
        'Environment :: Console',
        'Programming Language :: Python',
        'Programming Language :: Python :: 3.6',
        'Programming Language :: Python :: 3.7',
        'Programming Language :: Python :: 3.8',
        'Programming Language :: Python :: Implementation',
        'Programming Language :: Python :: Implementation :: CPython',
        'Programming Language :: Python :: Implementation :: PyPy',
        'License :: Public Domain',
        'Operating System :: OS Independent',
    ],
    python_requires='>=3.6',

    cmdclass={'build_lazy_extractors': build_lazy_extractors},
    **params
)
supportedsites.md (1324 changed lines; file diff suppressed because it is too large)
test/conftest.py (new file, 64 lines)
@@ -0,0 +1,64 @@
import inspect

import pytest

from yt_dlp.networking import RequestHandler
from yt_dlp.networking.common import _REQUEST_HANDLERS
from yt_dlp.utils._utils import _YDLLogger as FakeLogger


@pytest.fixture
def handler(request):
    RH_KEY = getattr(request, 'param', None)
    if not RH_KEY:
        return
    if inspect.isclass(RH_KEY) and issubclass(RH_KEY, RequestHandler):
        handler = RH_KEY
    elif RH_KEY in _REQUEST_HANDLERS:
        handler = _REQUEST_HANDLERS[RH_KEY]
    else:
        pytest.skip(f'{RH_KEY} request handler is not available')

    class HandlerWrapper(handler):
        RH_KEY = handler.RH_KEY

        def __init__(self, *args, **kwargs):
            super().__init__(logger=FakeLogger, *args, **kwargs)

    return HandlerWrapper


@pytest.fixture(autouse=True)
def skip_handler(request, handler):
    """usage: pytest.mark.skip_handler('my_handler', 'reason')"""
    for marker in request.node.iter_markers('skip_handler'):
        if marker.args[0] == handler.RH_KEY:
            pytest.skip(marker.args[1] if len(marker.args) > 1 else '')


@pytest.fixture(autouse=True)
def skip_handler_if(request, handler):
    """usage: pytest.mark.skip_handler_if('my_handler', lambda request: True, 'reason')"""
    for marker in request.node.iter_markers('skip_handler_if'):
        if marker.args[0] == handler.RH_KEY and marker.args[1](request):
            pytest.skip(marker.args[2] if len(marker.args) > 2 else '')


@pytest.fixture(autouse=True)
def skip_handlers_if(request, handler):
    """usage: pytest.mark.skip_handlers_if(lambda request, handler: True, 'reason')"""
    for marker in request.node.iter_markers('skip_handlers_if'):
        if handler and marker.args[0](request, handler):
            pytest.skip(marker.args[1] if len(marker.args) > 1 else '')


def pytest_configure(config):
    config.addinivalue_line(
        "markers", "skip_handler(handler): skip test for the given handler",
    )
    config.addinivalue_line(
        "markers", "skip_handler_if(handler): skip test for the given handler if condition is true"
    )
    config.addinivalue_line(
        "markers", "skip_handlers_if(handler): skip test for handlers when the condition is true"
    )
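The handler fixture resolves its (indirect) parameter to a RequestHandler subclass, wraps it so the fake logger is injected, and the autouse fixtures translate the skip_handler* markers into pytest.skip calls. A small, hypothetical test using them; 'Requests' is an assumed handler key, not something this file defines:

# Hypothetical test module relying on the conftest above
import pytest


@pytest.mark.parametrize('handler', ['Requests'], indirect=True)
@pytest.mark.skip_handler_if('Requests', lambda request: False, 'example condition')
def test_handler_smoke(handler):
    with handler() as rh:  # HandlerWrapper passes logger=FakeLogger
        assert rh.RH_KEY == 'Requests'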
test/helper.py (155 changed lines)
@@ -1,26 +1,16 @@
-from __future__ import unicode_literals
-
 import errno
-import io
 import hashlib
 import json
 import os.path
 import re
-import types
 import ssl
 import sys
+import types
 
 import yt_dlp.extractor
 from yt_dlp import YoutubeDL
-from yt_dlp.compat import (
-    compat_os_name,
-    compat_str,
-)
-from yt_dlp.utils import (
-    preferredencoding,
-    write_string,
-)
-
+from yt_dlp.compat import compat_os_name
+from yt_dlp.utils import preferredencoding, try_call, write_string, find_available_port
 
 if 'pytest' in sys.modules:
     import pytest
@@ -35,10 +25,10 @@ def get_params(override=None):
                                    'parameters.json')
     LOCAL_PARAMETERS_FILE = os.path.join(os.path.dirname(os.path.abspath(__file__)),
                                          'local_parameters.json')
-    with io.open(PARAMETERS_FILE, encoding='utf-8') as pf:
+    with open(PARAMETERS_FILE, encoding='utf-8') as pf:
         parameters = json.load(pf)
     if os.path.exists(LOCAL_PARAMETERS_FILE):
-        with io.open(LOCAL_PARAMETERS_FILE, encoding='utf-8') as pf:
+        with open(LOCAL_PARAMETERS_FILE, encoding='utf-8') as pf:
             parameters.update(json.load(pf))
     if override:
         parameters.update(override)
@@ -54,7 +44,7 @@ def try_rm(filename):
         raise
 
 
-def report_warning(message):
+def report_warning(message, *args, **kwargs):
     '''
     Print the message to stderr, it will be prefixed with 'WARNING:'
     If stderr is a tty file the 'WARNING:' will be colored
@@ -63,8 +53,8 @@ def report_warning(message):
         _msg_header = '\033[0;33mWARNING:\033[0m'
     else:
         _msg_header = 'WARNING:'
-    output = '%s %s\n' % (_msg_header, message)
-    if 'b' in getattr(sys.stderr, 'mode', '') or sys.version_info[0] < 3:
+    output = f'{_msg_header} {message}\n'
+    if 'b' in getattr(sys.stderr, 'mode', ''):
        output = output.encode(preferredencoding())
     sys.stderr.write(output)
 
@@ -74,13 +64,13 @@ class FakeYDL(YoutubeDL):
         # Different instances of the downloader can't share the same dictionary
         # some test set the "sublang" parameter, which would break the md5 checks.
         params = get_params(override=override)
-        super(FakeYDL, self).__init__(params, auto_init=False)
+        super().__init__(params, auto_init=False)
         self.result = []
 
-    def to_screen(self, s, skip_eol=None):
+    def to_screen(self, s, *args, **kwargs):
         print(s)
 
-    def trouble(self, s, tb=None):
+    def trouble(self, s, *args, **kwargs):
         raise Exception(s)
 
     def download(self, x):
@@ -90,56 +80,59 @@ class FakeYDL(YoutubeDL):
         # Silence an expected warning matching a regex
         old_report_warning = self.report_warning
 
-        def report_warning(self, message):
+        def report_warning(self, message, *args, **kwargs):
             if re.match(regex, message):
                 return
-            old_report_warning(message)
+            old_report_warning(message, *args, **kwargs)
         self.report_warning = types.MethodType(report_warning, self)
 
 
 def gettestcases(include_onlymatching=False):
     for ie in yt_dlp.extractor.gen_extractors():
-        for tc in ie.get_testcases(include_onlymatching):
+        yield from ie.get_testcases(include_onlymatching)
+
+
+def getwebpagetestcases():
+    for ie in yt_dlp.extractor.gen_extractors():
+        for tc in ie.get_webpage_testcases():
+            tc.setdefault('add_ie', []).append('Generic')
             yield tc
 
 
-md5 = lambda s: hashlib.md5(s.encode('utf-8')).hexdigest()
+md5 = lambda s: hashlib.md5(s.encode()).hexdigest()
 
 
 def expect_value(self, got, expected, field):
-    if isinstance(expected, compat_str) and expected.startswith('re:'):
+    if isinstance(expected, str) and expected.startswith('re:'):
         match_str = expected[len('re:'):]
         match_rex = re.compile(match_str)
 
         self.assertTrue(
-            isinstance(got, compat_str),
-            'Expected a %s object, but got %s for field %s' % (
-                compat_str.__name__, type(got).__name__, field))
+            isinstance(got, str),
+            f'Expected a {str.__name__} object, but got {type(got).__name__} for field {field}')
         self.assertTrue(
             match_rex.match(got),
-            'field %s (value: %r) should match %r' % (field, got, match_str))
+            f'field {field} (value: {got!r}) should match {match_str!r}')
-    elif isinstance(expected, compat_str) and expected.startswith('startswith:'):
+    elif isinstance(expected, str) and expected.startswith('startswith:'):
         start_str = expected[len('startswith:'):]
         self.assertTrue(
-            isinstance(got, compat_str),
-            'Expected a %s object, but got %s for field %s' % (
-                compat_str.__name__, type(got).__name__, field))
+            isinstance(got, str),
+            f'Expected a {str.__name__} object, but got {type(got).__name__} for field {field}')
         self.assertTrue(
             got.startswith(start_str),
-            'field %s (value: %r) should start with %r' % (field, got, start_str))
+            f'field {field} (value: {got!r}) should start with {start_str!r}')
-    elif isinstance(expected, compat_str) and expected.startswith('contains:'):
+    elif isinstance(expected, str) and expected.startswith('contains:'):
         contains_str = expected[len('contains:'):]
         self.assertTrue(
-            isinstance(got, compat_str),
-            'Expected a %s object, but got %s for field %s' % (
-                compat_str.__name__, type(got).__name__, field))
+            isinstance(got, str),
+            f'Expected a {str.__name__} object, but got {type(got).__name__} for field {field}')
         self.assertTrue(
             contains_str in got,
-            'field %s (value: %r) should contain %r' % (field, got, contains_str))
+            f'field {field} (value: {got!r}) should contain {contains_str!r}')
     elif isinstance(expected, type):
         self.assertTrue(
             isinstance(got, expected),
-            'Expected type %r for field %s, but got value %r of type %r' % (expected, field, got, type(got)))
+            f'Expected type {expected!r} for field {field}, but got value {got!r} of type {type(got)!r}')
     elif isinstance(expected, dict) and isinstance(got, dict):
         expect_dict(self, got, expected)
     elif isinstance(expected, list) and isinstance(got, list):
@@ -156,16 +149,15 @@ def expect_value(self, got, expected, field):
                 index, field, type_expected, type_got))
             expect_value(self, item_got, item_expected, field)
     else:
-        if isinstance(expected, compat_str) and expected.startswith('md5:'):
+        if isinstance(expected, str) and expected.startswith('md5:'):
             self.assertTrue(
-                isinstance(got, compat_str),
-                'Expected field %s to be a unicode object, but got value %r of type %r' % (field, got, type(got)))
+                isinstance(got, str),
+                f'Expected field {field} to be a unicode object, but got value {got!r} of type {type(got)!r}')
             got = 'md5:' + md5(got)
-        elif isinstance(expected, compat_str) and re.match(r'^(?:min|max)?count:\d+', expected):
+        elif isinstance(expected, str) and re.match(r'^(?:min|max)?count:\d+', expected):
             self.assertTrue(
                 isinstance(got, (list, dict)),
-                'Expected field %s to be a list or a dict, but it is of type %s' % (
-                    field, type(got).__name__))
+                f'Expected field {field} to be a list or a dict, but it is of type {type(got).__name__}')
             op, _, expected_num = expected.partition(':')
             expected_num = int(expected_num)
             if op == 'mincount':
@@ -185,7 +177,7 @@ def expect_value(self, got, expected, field):
             return
     self.assertEqual(
         expected, got,
-        'Invalid value for field %s, expected %r, got %r' % (field, expected, got))
+        f'Invalid value for field {field}, expected {expected!r}, got {got!r}')
 
 
 def expect_dict(self, got_dict, expected_dict):
@@ -202,8 +194,8 @@ def sanitize_got_info_dict(got_dict):
         'formats', 'thumbnails', 'subtitles', 'automatic_captions', 'comments', 'entries',
 
         # Auto-generated
-        'autonumber', 'playlist', 'format_index', 'video_ext', 'audio_ext', 'duration_string', 'epoch',
-        'fulltitle', 'extractor', 'extractor_key', 'filepath', 'infojson_filename', 'original_url', 'n_entries',
+        'autonumber', 'playlist', 'format_index', 'video_ext', 'audio_ext', 'duration_string', 'epoch', 'n_entries',
+        'fulltitle', 'extractor', 'extractor_key', 'filename', 'filepath', 'infojson_filename', 'original_url',
 
         # Only live_status needs to be checked
         'is_live', 'was_live',
@@ -222,14 +214,27 @@ def sanitize_got_info_dict(got_dict):
 
     test_info_dict = {
         key: sanitize(key, value) for key, value in got_dict.items()
-        if value is not None and key not in IGNORED_FIELDS and not any(
-            key.startswith(f'{prefix}_') for prefix in IGNORED_PREFIXES)
+        if value is not None and key not in IGNORED_FIELDS and (
+            not any(key.startswith(f'{prefix}_') for prefix in IGNORED_PREFIXES)
+            or key == '_old_archive_ids')
     }
 
     # display_id may be generated from id
     if test_info_dict.get('display_id') == test_info_dict.get('id'):
         test_info_dict.pop('display_id')
 
+    # Remove deprecated fields
+    for old in YoutubeDL._deprecated_multivalue_fields.keys():
+        test_info_dict.pop(old, None)
+
+    # release_year may be generated from release_date
+    if try_call(lambda: test_info_dict['release_year'] == int(test_info_dict['release_date'][:4])):
+        test_info_dict.pop('release_year')
+
+    # Check url for flat entries
+    if got_dict.get('_type', 'video') != 'video' and got_dict.get('url'):
+        test_info_dict['url'] = got_dict['url']
+
     return test_info_dict
 
 
@@ -243,33 +248,31 @@ def expect_info_dict(self, got_dict, expected_dict):
     for key in mandatory_fields:
         self.assertTrue(got_dict.get(key), 'Missing mandatory field %s' % key)
     # Check for mandatory fields that are automatically set by YoutubeDL
-    for key in ['webpage_url', 'extractor', 'extractor_key']:
-        self.assertTrue(got_dict.get(key), 'Missing field: %s' % key)
+    if got_dict.get('_type', 'video') == 'video':
+        for key in ['webpage_url', 'extractor', 'extractor_key']:
+            self.assertTrue(got_dict.get(key), 'Missing field: %s' % key)
 
     test_info_dict = sanitize_got_info_dict(got_dict)
 
     missing_keys = set(test_info_dict.keys()) - set(expected_dict.keys())
     if missing_keys:
         def _repr(v):
-            if isinstance(v, compat_str):
+            if isinstance(v, str):
                 return "'%s'" % v.replace('\\', '\\\\').replace("'", "\\'").replace('\n', '\\n')
             elif isinstance(v, type):
                 return v.__name__
             else:
                 return repr(v)
-        info_dict_str = ''
-        if len(missing_keys) != len(expected_dict):
-            info_dict_str += ''.join(
-                '    %s: %s,\n' % (_repr(k), _repr(v))
-                for k, v in test_info_dict.items() if k not in missing_keys)
-
-            if info_dict_str:
-                info_dict_str += '\n'
+        info_dict_str = ''.join(
+            f'    {_repr(k)}: {_repr(v)},\n'
+            for k, v in test_info_dict.items() if k not in missing_keys)
+        if info_dict_str:
+            info_dict_str += '\n'
         info_dict_str += ''.join(
-            '    %s: %s,\n' % (_repr(k), _repr(test_info_dict[k]))
+            f'    {_repr(k)}: {_repr(test_info_dict[k])},\n'
             for k in missing_keys)
-        write_string(
-            '\n\'info_dict\': {\n' + info_dict_str + '},\n', out=sys.stderr)
+        info_dict_str = '\n\'info_dict\': {\n' + info_dict_str + '},\n'
+        write_string(info_dict_str.replace('\n', '\n        '), out=sys.stderr)
         self.assertFalse(
             missing_keys,
             'Missing keys in test definition: %s' % (
@@ -295,30 +298,30 @@ def assertRegexpMatches(self, text, regexp, msg=None):
 def assertGreaterEqual(self, got, expected, msg=None):
     if not (got >= expected):
         if msg is None:
-            msg = '%r not greater than or equal to %r' % (got, expected)
+            msg = f'{got!r} not greater than or equal to {expected!r}'
         self.assertTrue(got >= expected, msg)
 
 
 def assertLessEqual(self, got, expected, msg=None):
     if not (got <= expected):
         if msg is None:
-            msg = '%r not less than or equal to %r' % (got, expected)
+            msg = f'{got!r} not less than or equal to {expected!r}'
         self.assertTrue(got <= expected, msg)
 
 
 def assertEqual(self, got, expected, msg=None):
     if not (got == expected):
         if msg is None:
-            msg = '%r not equal to %r' % (got, expected)
+            msg = f'{got!r} not equal to {expected!r}'
         self.assertTrue(got == expected, msg)
 
 
 def expect_warnings(ydl, warnings_re):
     real_warning = ydl.report_warning
 
-    def _report_warning(w):
+    def _report_warning(w, *args, **kwargs):
         if not any(re.search(w_re, w) for w_re in warnings_re):
-            real_warning(w)
+            real_warning(w, *args, **kwargs)
 
     ydl.report_warning = _report_warning
 
@@ -330,3 +333,13 @@ def http_server_port(httpd):
     else:
         sock = httpd.socket
     return sock.getsockname()[1]
+
+
+def verify_address_availability(address):
+    if find_available_port(address) is None:
+        pytest.skip(f'Unable to bind to source address {address} (address may not exist)')
+
+
+def validate_and_send(rh, req):
+    rh.validate(req)
+    return rh.send(req)
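Most of the helper changes are a mechanical Python 3 cleanup (compat_str becomes str, %-formatting becomes f-strings), but the expectation helpers keep their contract: string expectations understand re:, startswith:, contains:, md5: and mincount:/maxcount: prefixes, and a bare type is an isinstance check. A minimal sketch of a test case driving expect_dict, with invented field values (assumes it runs from the repository root so test.helper is importable):

# Illustrative use of the expectation helpers above
import unittest

from test.helper import expect_dict


class TestExpectDict(unittest.TestCase):
    def test_fields(self):
        got = {'id': '123', 'title': 'An example title', 'tags': ['a', 'b']}
        expect_dict(self, got, {
            'id': '123',                  # exact match
            'title': 'startswith:An ex',  # prefix expectation
            'tags': 'mincount:1',         # length expectation for lists/dicts
        })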
@@ -44,5 +44,6 @@
     "writesubtitles": false,
     "allsubtitles": false,
     "listsubtitles": false,
-    "fixup": "never"
+    "fixup": "never",
+    "allow_playlist_files": false
 }
test/swftests.unused/.gitignore (vendored, 1 line deleted)
@@ -1 +0,0 @@
-*.swf
@@ -1,19 +0,0 @@
-// input: [["a", "b", "c", "d"]]
-// output: ["c", "b", "a", "d"]
-
-package {
-public class ArrayAccess {
-    public static function main(ar:Array):Array {
-        var aa:ArrayAccess = new ArrayAccess();
-        return aa.f(ar, 2);
-    }
-
-    private function f(ar:Array, num:Number):Array{
-        var x:String = ar[0];
-        var y:String = ar[num % ar.length];
-        ar[0] = y;
-        ar[num] = x;
-        return ar;
-    }
-}
-}
@@ -1,17 +0,0 @@
-// input: []
-// output: 121
-
-package {
-public class ClassCall {
-    public static function main():int{
-        var f:OtherClass = new OtherClass();
-        return f.func(100,20);
-    }
-}
-}
-
-class OtherClass {
-    public function func(x: int, y: int):int {
-        return x+y+1;
-    }
-}
@@ -1,15 +0,0 @@
-// input: []
-// output: 0
-
-package {
-public class ClassConstruction {
-    public static function main():int{
-        var f:Foo = new Foo();
-        return 0;
-    }
-}
-}
-
-class Foo {
-
-}
@@ -1,18 +0,0 @@
-// input: []
-// output: 4
-
-package {
-public class ConstArrayAccess {
-    private static const x:int = 2;
-    private static const ar:Array = ["42", "3411"];
-
-    public static function main():int{
-        var c:ConstArrayAccess = new ConstArrayAccess();
-        return c.f();
-    }
-
-    public function f(): int {
-        return ar[1].length;
-    }
-}
-}
@@ -1,12 +0,0 @@
-// input: []
-// output: 2
-
-package {
-public class ConstantInt {
-    private static const x:int = 2;
-
-    public static function main():int{
-        return x;
-    }
-}
-}
@@ -1,10 +0,0 @@
-// input: [{"x": 1, "y": 2}]
-// output: 3
-
-package {
-public class DictCall {
-    public static function main(d:Object):int{
-        return d.x + d.y;
-    }
-}
-}
@@ -1,10 +0,0 @@
-// input: []
-// output: false
-
-package {
-public class EqualsOperator {
-    public static function main():Boolean{
-        return 1 == 2;
-    }
-}
-}
@@ -1,13 +0,0 @@
-// input: [1, 2]
-// output: 3
-
-package {
-public class LocalVars {
-    public static function main(a:int, b:int):int{
-        var c:int = a + b + b;
-        var d:int = c - b;
-        var e:int = d;
-        return e;
-    }
-}
-}
@@ -1,22 +0,0 @@
-// input: [1]
-// output: 2
-
-package {
-public class MemberAssignment {
-    public var v:int;
-
-    public function g():int {
-        return this.v;
-    }
-
-    public function f(a:int):int{
-        this.v = a;
-        return this.v + this.g();
-    }
-
-    public static function main(a:int): int {
-        var v:MemberAssignment = new MemberAssignment();
-        return v.f(a);
-    }
-}
-}
@@ -1,24 +0,0 @@
-// input: []
-// output: 123
-
-package {
-public class NeOperator {
-    public static function main(): int {
-        var res:int = 0;
-        if (1 != 2) {
-            res += 3;
-        } else {
-            res += 4;
-        }
-        if (2 != 2) {
-            res += 10;
-        } else {
-            res += 20;
-        }
-        if (9 == 9) {
-            res += 100;
-        }
-        return res;
-    }
-}
-}
Some files were not shown because too many files have changed in this diff.