mirror of https://github.com/yt-dlp/yt-dlp
Compare commits
763 Commits
2023.06.21
...
master
Author | SHA1 | Date |
---|---|---|
![]() |
f3411af12e | 1 day ago |
![]() |
a8520244b8 | 3 days ago |
![]() |
8ca1d57ed0 | 4 days ago |
![]() |
800ec085cc | 4 days ago |
![]() |
96472d72f2 | 4 days ago |
![]() |
7aa322c02c | 5 days ago |
![]() |
9bd8501993 | 5 days ago |
![]() |
90c3721a32 | 1 week ago |
![]() |
d4b52ce3fc | 1 week ago |
![]() |
d6c2c2bc84 | 1 week ago |
![]() |
5dbac313ae | 1 week ago |
![]() |
ca8885edd9 | 1 week ago |
![]() |
4093eb1fcc | 1 week ago |
![]() |
a0d9967f68 | 2 weeks ago |
![]() |
ea88129784 | 2 weeks ago |
![]() |
b8e2a5e0e1 | 2 weeks ago |
![]() |
e53e56b735 | 2 weeks ago |
![]() |
92a1c4abae | 2 weeks ago |
![]() |
3690c2f598 | 2 weeks ago |
![]() |
081708d607 | 2 weeks ago |
![]() |
d7d861811c | 2 weeks ago |
![]() |
46c1b7cfec | 2 weeks ago |
![]() |
add96eb9f8 | 2 weeks ago |
![]() |
db50f19d76 | 3 weeks ago |
![]() |
2e5a47da40 | 4 weeks ago |
![]() |
5fdd13006a | 4 weeks ago |
![]() |
03334d639d | 4 weeks ago |
![]() |
8b46ad4d8b | 4 weeks ago |
![]() |
bef9a9e536 | 4 weeks ago |
![]() |
111b61ddef | 4 weeks ago |
![]() |
12b248ce60 | 4 weeks ago |
![]() |
5e3e19c93c | 4 weeks ago |
![]() |
c53c2e40fd | 4 weeks ago |
![]() |
ae2194e1dd | 1 month ago |
![]() |
26603d0b34 | 1 month ago |
![]() |
ed274b60b1 | 1 month ago |
![]() |
ae2af1104f | 1 month ago |
![]() |
5c019f6328 | 1 month ago |
![]() |
5a2eebc767 | 1 month ago |
![]() |
119d41f270 | 1 month ago |
![]() |
347f13dd9b | 1 month ago |
![]() |
96a134dea6 | 1 month ago |
![]() |
a4da9db87b | 1 month ago |
![]() |
e897bd8292 | 1 month ago |
![]() |
a2e9031605 | 1 month ago |
![]() |
3ba8de62d6 | 1 month ago |
![]() |
0d067e77c3 | 1 month ago |
![]() |
1463945ae5 | 1 month ago |
![]() |
c92e4e625e | 1 month ago |
![]() |
90d2da311b | 1 month ago |
![]() |
3779f2a307 | 1 month ago |
![]() |
63b569bc5e | 1 month ago |
![]() |
82f4f4444e | 1 month ago |
![]() |
eead3bbc01 | 1 month ago |
![]() |
5bbfdb7c99 | 1 month ago |
![]() |
0dd53faeca | 1 month ago |
![]() |
be7db1a5a8 | 1 month ago |
![]() |
65e709d235 | 1 month ago |
![]() |
06cb063839 | 1 month ago |
![]() |
296df0da1d | 1 month ago |
![]() |
7b5674949f | 1 month ago |
![]() |
f2816634e3 | 1 month ago |
![]() |
beaf832c7a | 1 month ago |
![]() |
eef1e9f44f | 1 month ago |
![]() |
78c57cc0e0 | 1 month ago |
![]() |
3f7999533e | 1 month ago |
![]() |
4ccd73fea0 | 1 month ago |
![]() |
3584b8390b | 1 month ago |
![]() |
6e36d17f40 | 1 month ago |
![]() |
c36513f1be | 1 month ago |
![]() |
3e35aa32c7 | 1 month ago |
![]() |
53b4d44f55 | 1 month ago |
![]() |
c999bac02c | 1 month ago |
![]() |
12d8ea8246 | 1 month ago |
![]() |
8e15177b41 | 1 month ago |
![]() |
dd9ad97b1f | 1 month ago |
![]() |
61b17437dc | 1 month ago |
![]() |
7975ddf245 | 1 month ago |
![]() |
6d8a53d870 | 1 month ago |
![]() |
4813173e45 | 1 month ago |
![]() |
41ba4a808b | 1 month ago |
![]() |
351dc0bc33 | 1 month ago |
![]() |
518c1afc15 | 1 month ago |
![]() |
85ec2a337a | 1 month ago |
![]() |
b207d26f83 | 1 month ago |
![]() |
01395a3434 | 1 month ago |
![]() |
cf212d0a33 | 1 month ago |
![]() |
6db96268c5 | 2 months ago |
![]() |
800a43983e | 2 months ago |
![]() |
7e4259dff0 | 2 months ago |
![]() |
f1f158976e | 2 months ago |
![]() |
31b417e1d1 | 2 months ago |
![]() |
fc2879ecb0 | 2 months ago |
![]() |
0a1a8e3005 | 2 months ago |
![]() |
4cc99d7b6c | 2 months ago |
![]() |
3c7a287e28 | 2 months ago |
![]() |
98d71d8c5e | 2 months ago |
![]() |
00a9f2e1f7 | 2 months ago |
![]() |
73f12119b5 | 2 months ago |
![]() |
6b54cccdcb | 2 months ago |
![]() |
c4b87dd885 | 2 months ago |
![]() |
2338827072 | 2 months ago |
![]() |
06d52c8731 | 2 months ago |
![]() |
df5c9e733a | 2 months ago |
![]() |
b38018b781 | 2 months ago |
![]() |
145dc6f656 | 2 months ago |
![]() |
5904853ae5 | 2 months ago |
![]() |
c8bf48f3a8 | 2 months ago |
![]() |
351368cb9a | 2 months ago |
![]() |
96da952504 | 2 months ago |
![]() |
bec9a59e8e | 2 months ago |
![]() |
036e0d92c6 | 2 months ago |
![]() |
cb2fb4a643 | 2 months ago |
![]() |
231c2eacc4 | 2 months ago |
![]() |
c4853655cb | 2 months ago |
![]() |
ac817bc83e | 2 months ago |
![]() |
1a366403d9 | 2 months ago |
![]() |
7e26bd53f9 | 2 months ago |
![]() |
64766459e3 | 2 months ago |
![]() |
89f535e265 | 2 months ago |
![]() |
ff38a011d5 | 2 months ago |
![]() |
8056a3026e | 2 months ago |
![]() |
3ee1194288 | 2 months ago |
![]() |
e3b42d8b1b | 2 months ago |
![]() |
c9ce57d9bf | 2 months ago |
![]() |
02483bea1c | 2 months ago |
![]() |
315b354429 | 2 months ago |
![]() |
0c21c53885 | 2 months ago |
![]() |
168e72dcd3 | 3 months ago |
![]() |
ff07792676 | 3 months ago |
![]() |
216f6a3cb5 | 3 months ago |
![]() |
b19ae095fd | 3 months ago |
![]() |
9590cc6b47 | 3 months ago |
![]() |
79a451e576 | 3 months ago |
![]() |
df0e138fc0 | 3 months ago |
![]() |
2e94602f24 | 3 months ago |
![]() |
4af9d5c2f6 | 3 months ago |
![]() |
36b240f9a7 | 3 months ago |
![]() |
fc53ec13ff | 3 months ago |
![]() |
2ab2651a4a | 3 months ago |
![]() |
b15b0c1d21 | 3 months ago |
![]() |
c8a61a9100 | 3 months ago |
![]() |
f2fd449b46 | 3 months ago |
![]() |
9415f1a5ef | 3 months ago |
![]() |
a48cc86d6f | 3 months ago |
![]() |
954e57e405 | 3 months ago |
![]() |
9073ae6458 | 3 months ago |
![]() |
4cd9e251b9 | 3 months ago |
![]() |
0ae16ceb18 | 3 months ago |
![]() |
443e206ec4 | 3 months ago |
![]() |
4c3b7a0769 | 3 months ago |
![]() |
16be117729 | 3 months ago |
![]() |
b49d5ffc53 | 3 months ago |
![]() |
36baaa10e0 | 3 months ago |
![]() |
02f93ff51b | 3 months ago |
![]() |
c59de48e2b | 3 months ago |
![]() |
0284f1fee2 | 3 months ago |
![]() |
e8032503b9 | 3 months ago |
![]() |
97362712a1 | 3 months ago |
![]() |
246571ae1d | 3 months ago |
![]() |
32abfb00bd | 3 months ago |
![]() |
c305a25c1b | 3 months ago |
![]() |
e3a3ed8a98 | 3 months ago |
![]() |
a25a424323 | 3 months ago |
![]() |
86e3b82261 | 3 months ago |
![]() |
e7b17fce14 | 3 months ago |
![]() |
a2d0840739 | 3 months ago |
![]() |
86a972033e | 3 months ago |
![]() |
50c2935231 | 3 months ago |
![]() |
0df63cce69 | 3 months ago |
![]() |
63f685f341 | 3 months ago |
![]() |
3699eeb67c | 3 months ago |
![]() |
979ce2e786 | 3 months ago |
![]() |
58dd0f8d1e | 3 months ago |
![]() |
cb61e20c26 | 3 months ago |
![]() |
9c42b7eef5 | 3 months ago |
![]() |
e5d4f11104 | 3 months ago |
![]() |
bc2b8c0596 | 3 months ago |
![]() |
aa7e9ae4f4 | 3 months ago |
![]() |
07f5b2f757 | 3 months ago |
![]() |
ff349ff94a | 3 months ago |
![]() |
f859ed3ba1 | 3 months ago |
![]() |
17d248a587 | 3 months ago |
![]() |
388c979ac6 | 3 months ago |
![]() |
22e4dfacb6 | 3 months ago |
![]() |
86d2f4d248 | 3 months ago |
![]() |
52f5be1f1e | 3 months ago |
![]() |
0b81d4d252 | 3 months ago |
![]() |
f849d77ab5 | 3 months ago |
![]() |
f2868b26e9 | 3 months ago |
![]() |
be77923ffe | 3 months ago |
![]() |
8c05b3ebae | 3 months ago |
![]() |
0da66980d3 | 3 months ago |
![]() |
17b96974a3 | 3 months ago |
![]() |
8463fb510a | 4 months ago |
![]() |
615a84447e | 4 months ago |
![]() |
ed3bb2b0a1 | 4 months ago |
![]() |
45491a2a30 | 4 months ago |
![]() |
a687226b48 | 4 months ago |
![]() |
93240fc184 | 4 months ago |
![]() |
47ab66db0f | 4 months ago |
![]() |
0abf2f1f15 | 4 months ago |
![]() |
2d91b98456 | 4 months ago |
![]() |
8828f4576b | 4 months ago |
![]() |
dbd8b1bff9 | 4 months ago |
![]() |
8993721ecb | 4 months ago |
![]() |
263a4b55ac | 4 months ago |
![]() |
b136e2af34 | 4 months ago |
![]() |
b2cc150ad8 | 4 months ago |
![]() |
785ab1af7f | 4 months ago |
![]() |
7aad06541e | 4 months ago |
![]() |
d3d4187da9 | 4 months ago |
![]() |
c8c9039e64 | 4 months ago |
![]() |
df773c3d5d | 4 months ago |
![]() |
f4f9f6d00e | 4 months ago |
![]() |
dfd8c0b696 | 4 months ago |
![]() |
dd29e6e5fd | 4 months ago |
![]() |
96f3924bac | 4 months ago |
![]() |
0fcefb92f3 | 4 months ago |
![]() |
e4fbe5f886 | 4 months ago |
![]() |
cd7086c0d5 | 4 months ago |
![]() |
cf91400a1d | 4 months ago |
![]() |
ac340d0745 | 4 months ago |
![]() |
11ffa92a61 | 4 months ago |
![]() |
ede624d1db | 4 months ago |
![]() |
40966e8da2 | 4 months ago |
![]() |
eedb38ce40 | 4 months ago |
![]() |
6ad11fef65 | 4 months ago |
![]() |
f0426e9ca5 | 4 months ago |
![]() |
d9b4154cbc | 4 months ago |
![]() |
9749ac7fec | 4 months ago |
![]() |
413d367580 | 4 months ago |
![]() |
aa13a8e3dd | 4 months ago |
![]() |
8f423cf805 | 4 months ago |
![]() |
804f236611 | 4 months ago |
![]() |
f00c0def74 | 4 months ago |
![]() |
e546e5d3b3 | 4 months ago |
![]() |
4170b3d712 | 4 months ago |
![]() |
9ff9466455 | 4 months ago |
![]() |
e28e135d6f | 4 months ago |
![]() |
f1570ab84d | 4 months ago |
![]() |
069b2aedae | 4 months ago |
![]() |
5eedc208ec | 4 months ago |
![]() |
464c919ea8 | 4 months ago |
![]() |
3894ab9574 | 4 months ago |
![]() |
b05640d532 | 4 months ago |
![]() |
7a29cbbd5f | 4 months ago |
![]() |
2e8de097ad | 4 months ago |
![]() |
f3d5face83 | 4 months ago |
![]() |
eabbccc439 | 4 months ago |
![]() |
0de09c5b9e | 4 months ago |
![]() |
6a6cdcd182 | 4 months ago |
![]() |
998dffb5a2 | 4 months ago |
![]() |
29a74a6126 | 4 months ago |
![]() |
55f1833376 | 4 months ago |
![]() |
3d9dc2f359 | 4 months ago |
![]() |
28e53d60df | 4 months ago |
![]() |
f591e605df | 4 months ago |
![]() |
9a8afadd17 | 4 months ago |
![]() |
104a7b5a46 | 4 months ago |
![]() |
7e90e34fa4 | 4 months ago |
![]() |
4ce57d3b87 | 4 months ago |
![]() |
ffff1bc659 | 4 months ago |
![]() |
4f04347909 | 4 months ago |
![]() |
4392447d94 | 4 months ago |
![]() |
43cfd462c0 | 4 months ago |
![]() |
974d444039 | 4 months ago |
![]() |
80ed8bdeba | 4 months ago |
![]() |
de954c1b4d | 4 months ago |
![]() |
0085e2bab8 | 4 months ago |
![]() |
73fcfa39f5 | 4 months ago |
![]() |
41d6b61e98 | 4 months ago |
![]() |
0bee29493c | 4 months ago |
![]() |
644738ddaa | 4 months ago |
![]() |
c168d8791d | 4 months ago |
![]() |
ddd4b5e10a | 4 months ago |
![]() |
f788149237 | 4 months ago |
![]() |
017adb28e7 | 4 months ago |
![]() |
2e30b5567b | 4 months ago |
![]() |
beaa1a4455 | 4 months ago |
![]() |
fb44020fa9 | 4 months ago |
![]() |
3dc9232e1a | 4 months ago |
![]() |
9401736fd0 | 4 months ago |
![]() |
cd0443fb14 | 4 months ago |
![]() |
03536126d3 | 4 months ago |
![]() |
1ed5ee2f04 | 4 months ago |
![]() |
3876429d72 | 5 months ago |
![]() |
b0059f0413 | 5 months ago |
![]() |
b14e818b37 | 5 months ago |
![]() |
867f637b95 | 5 months ago |
![]() |
920397634d | 5 months ago |
![]() |
b8a433aaca | 5 months ago |
![]() |
fd647775e2 | 5 months ago |
![]() |
775cde82dc | 5 months ago |
![]() |
868d2f60a7 | 5 months ago |
![]() |
a1b7784289 | 5 months ago |
![]() |
882e3b753c | 5 months ago |
![]() |
540b682981 | 5 months ago |
![]() |
05420227aa | 5 months ago |
![]() |
35d96982f1 | 5 months ago |
![]() |
acaf806c15 | 5 months ago |
![]() |
07256b9fee | 5 months ago |
![]() |
e439693f72 | 5 months ago |
![]() |
96d0f8c1cb | 5 months ago |
![]() |
e3ce2b385e | 5 months ago |
![]() |
4253e3b7f4 | 5 months ago |
![]() |
8e765755f7 | 5 months ago |
![]() |
ffa017cfc5 | 5 months ago |
![]() |
a0d50aabc5 | 5 months ago |
![]() |
2f4b575946 | 5 months ago |
![]() |
fc2cc626f0 | 5 months ago |
![]() |
a2bac6b7ad | 5 months ago |
![]() |
4b8b0dded8 | 5 months ago |
![]() |
4a6ff0b47a | 5 months ago |
![]() |
62c65bfaf8 | 5 months ago |
![]() |
d63eae7e7f | 5 months ago |
![]() |
2792092afd | 5 months ago |
![]() |
cbed249aaa | 5 months ago |
![]() |
3725b4f0c9 | 5 months ago |
![]() |
67bb70cd70 | 5 months ago |
![]() |
9b5efaf86b | 5 months ago |
![]() |
999ea80beb | 5 months ago |
![]() |
41b6cdb419 | 5 months ago |
![]() |
02e343f6ef | 5 months ago |
![]() |
a514cc2feb | 5 months ago |
![]() |
87286e93af | 5 months ago |
![]() |
3c4d3ee491 | 5 months ago |
![]() |
5b68c478fb | 5 months ago |
![]() |
9526b1f179 | 5 months ago |
![]() |
0023af81fb | 5 months ago |
![]() |
cae6e46107 | 5 months ago |
![]() |
c91d8b1899 | 5 months ago |
![]() |
77c2472ca1 | 5 months ago |
![]() |
d79c7e9937 | 5 months ago |
![]() |
5dda3b291f | 5 months ago |
![]() |
5f25f348f9 | 5 months ago |
![]() |
a40b0070c2 | 5 months ago |
![]() |
9cd9044790 | 5 months ago |
![]() |
f0e8bc7c60 | 5 months ago |
![]() |
c099ec9392 | 5 months ago |
![]() |
c0ecceeefe | 5 months ago |
![]() |
3e083191cd | 5 months ago |
![]() |
9f1e9dab21 | 5 months ago |
![]() |
5a63454b36 | 5 months ago |
![]() |
fcaa2e735b | 5 months ago |
![]() |
35f4f764a7 | 5 months ago |
![]() |
f24e44e8cb | 5 months ago |
![]() |
811d298b23 | 5 months ago |
![]() |
69d3191495 | 5 months ago |
![]() |
50e06e21a6 | 5 months ago |
![]() |
4310b6650e | 5 months ago |
![]() |
1713c88273 | 5 months ago |
![]() |
4a07a455bb | 5 months ago |
![]() |
5eb1458be4 | 5 months ago |
![]() |
1a36dbad71 | 5 months ago |
![]() |
12f0427405 | 5 months ago |
![]() |
5154dc0a68 | 5 months ago |
![]() |
8ab8465083 | 5 months ago |
![]() |
e641aab7a6 | 5 months ago |
![]() |
20cdad5a2c | 5 months ago |
![]() |
43694ce13c | 5 months ago |
![]() |
8226a3818f | 5 months ago |
![]() |
c51316f8a6 | 5 months ago |
![]() |
a281beba8d | 5 months ago |
![]() |
ba6b0c8261 | 5 months ago |
![]() |
6171b050d7 | 5 months ago |
![]() |
aa5dcc4ee6 | 5 months ago |
![]() |
5e2e24b2c5 | 5 months ago |
![]() |
fee2d8d9c3 | 5 months ago |
![]() |
cf9af2c7f1 | 5 months ago |
![]() |
cf6413e840 | 5 months ago |
![]() |
5498729c59 | 5 months ago |
![]() |
393b487a4e | 5 months ago |
![]() |
4d9dc0abe2 | 5 months ago |
![]() |
014cb5774d | 5 months ago |
![]() |
8e6e365172 | 5 months ago |
![]() |
95e82347b3 | 6 months ago |
![]() |
5b8c69ae04 | 6 months ago |
![]() |
5af1f19787 | 6 months ago |
![]() |
b6951271ac | 6 months ago |
![]() |
ffbd4f2a02 | 6 months ago |
![]() |
292d60b1ed | 6 months ago |
![]() |
85b33f5c16 | 6 months ago |
![]() |
85a2d07c1f | 6 months ago |
![]() |
9f40cd2896 | 6 months ago |
![]() |
f10589e345 | 6 months ago |
![]() |
f9fb3ce86e | 6 months ago |
![]() |
5f009a094f | 6 months ago |
![]() |
225cf2b830 | 6 months ago |
![]() |
2d1d683a54 | 6 months ago |
![]() |
65de7d204c | 6 months ago |
![]() |
c39358a54b | 6 months ago |
![]() |
1f8bd8eba8 | 6 months ago |
![]() |
00cdda4f6f | 6 months ago |
![]() |
116c268438 | 6 months ago |
![]() |
e7d22348e7 | 6 months ago |
![]() |
50eaea9fd7 | 6 months ago |
![]() |
f45c4efcd9 | 6 months ago |
![]() |
13b3cb3c2b | 6 months ago |
![]() |
0d531c35ec | 6 months ago |
![]() |
bc4ab17b38 | 6 months ago |
![]() |
632b8ee54e | 6 months ago |
![]() |
c919b68f7e | 6 months ago |
![]() |
19741ab8a4 | 6 months ago |
![]() |
37755a037e | 6 months ago |
![]() |
196eb0fe77 | 6 months ago |
![]() |
db8b4edc7d | 6 months ago |
![]() |
1c54a98e19 | 6 months ago |
![]() |
00a3e47bf5 | 6 months ago |
![]() |
c5f01bf7d4 | 6 months ago |
![]() |
c91af948e4 | 6 months ago |
![]() |
6b5d93b0b0 | 7 months ago |
![]() |
298230e550 | 7 months ago |
![]() |
d5d1517e7d | 7 months ago |
![]() |
7e09c147fd | 7 months ago |
![]() |
e370f9ec36 | 7 months ago |
![]() |
b1a1ec1540 | 7 months ago |
![]() |
0b6f829b1d | 7 months ago |
![]() |
f98a3305eb | 7 months ago |
![]() |
04a5e06350 | 7 months ago |
![]() |
b03c89309e | 7 months ago |
![]() |
71f28097fe | 7 months ago |
![]() |
044886c220 | 7 months ago |
![]() |
993edd3f6e | 7 months ago |
![]() |
6a9c7a2b52 | 7 months ago |
![]() |
a174c453ee | 7 months ago |
![]() |
15f22b4880 | 7 months ago |
![]() |
9751a457cf | 7 months ago |
![]() |
5a230233d6 | 7 months ago |
![]() |
4903f452b6 | 7 months ago |
![]() |
ff2fde1b8f | 7 months ago |
![]() |
deeb13eae8 | 7 months ago |
![]() |
bb5a54e6db | 7 months ago |
![]() |
628fa244bb | 7 months ago |
![]() |
9cafb9ff17 | 7 months ago |
![]() |
1732eccc0a | 7 months ago |
![]() |
a0b19d319a | 7 months ago |
![]() |
cc07f5cc85 | 7 months ago |
![]() |
ccfd70f4c2 | 7 months ago |
![]() |
45d82be65f | 7 months ago |
![]() |
3237f8ba29 | 7 months ago |
![]() |
1725e943b0 | 7 months ago |
![]() |
9f09bdcfcb | 7 months ago |
![]() |
f124fa4588 | 7 months ago |
![]() |
585d0ed9ab | 7 months ago |
![]() |
1fa3f24d4b | 7 months ago |
![]() |
ddb2d7588b | 7 months ago |
![]() |
f223b1b078 | 7 months ago |
![]() |
6fe82491ed | 7 months ago |
![]() |
34df1c1f60 | 7 months ago |
![]() |
1d24da6c89 | 7 months ago |
![]() |
66a0127d45 | 7 months ago |
![]() |
3f90813f06 | 7 months ago |
![]() |
64de1a4c25 | 7 months ago |
![]() |
f96ab86cd8 | 7 months ago |
![]() |
f4b95acafc | 7 months ago |
![]() |
fe6c82ccff | 7 months ago |
![]() |
24f827875c | 7 months ago |
![]() |
15cb3528cb | 7 months ago |
![]() |
2325d03aa7 | 7 months ago |
![]() |
e569c2d1f4 | 7 months ago |
![]() |
a489f07150 | 7 months ago |
![]() |
5efe68b73c | 7 months ago |
![]() |
b530118e7f | 7 months ago |
![]() |
dcfad52812 | 7 months ago |
![]() |
0783fd558e | 7 months ago |
![]() |
0f634dba3a | 7 months ago |
![]() |
21dc069bea | 7 months ago |
![]() |
5d3a3cd493 | 7 months ago |
![]() |
a9d3f4b20a | 7 months ago |
![]() |
b012271d01 | 7 months ago |
![]() |
f04b5bedad | 7 months ago |
![]() |
d4f14a72dc | 7 months ago |
![]() |
87264d4fda | 8 months ago |
![]() |
a00af29853 | 8 months ago |
![]() |
0b6ad22e6a | 8 months ago |
![]() |
5438593a35 | 8 months ago |
![]() |
9970d74c83 | 8 months ago |
![]() |
20314dd46f | 8 months ago |
![]() |
1d03633c5a | 8 months ago |
![]() |
8afd9468b0 | 8 months ago |
![]() |
ef12dbdcd3 | 8 months ago |
![]() |
46acc418a5 | 8 months ago |
![]() |
6ba3085616 | 8 months ago |
![]() |
f6e97090d2 | 8 months ago |
![]() |
2863fcf2b6 | 8 months ago |
![]() |
c76c96677f | 8 months ago |
![]() |
15b252dfd2 | 8 months ago |
![]() |
312a2d1e8b | 8 months ago |
![]() |
54579be436 | 8 months ago |
![]() |
05adfd883a | 8 months ago |
![]() |
3ff494f6f4 | 8 months ago |
![]() |
9b5bedf13a | 8 months ago |
![]() |
cb480e390d | 8 months ago |
![]() |
25a4bd345a | 8 months ago |
![]() |
3906de0755 | 8 months ago |
![]() |
7d337ca977 | 8 months ago |
![]() |
10025b715e | 8 months ago |
![]() |
595ea4a99b | 8 months ago |
![]() |
2622c804d1 | 8 months ago |
![]() |
fd8fcf8f4f | 8 months ago |
![]() |
21b25281c5 | 8 months ago |
![]() |
4a601c9eff | 8 months ago |
![]() |
464327acdb | 8 months ago |
![]() |
ef79d20dc9 | 8 months ago |
![]() |
39abae2354 | 8 months ago |
![]() |
4ce2f29a50 | 8 months ago |
![]() |
177f0d963e | 8 months ago |
![]() |
8e02a4dcc8 | 8 months ago |
![]() |
7b8b1cf5eb | 8 months ago |
![]() |
a40e0b37df | 8 months ago |
![]() |
4e38e2ae9d | 8 months ago |
![]() |
8a8b54523a | 9 months ago |
![]() |
700444c23d | 9 months ago |
![]() |
b73c409318 | 9 months ago |
![]() |
b634ba742d | 9 months ago |
![]() |
2acd1d555e | 9 months ago |
![]() |
b286ec68f1 | 9 months ago |
![]() |
e030b6b6fb | 9 months ago |
![]() |
b931664231 | 9 months ago |
![]() |
feebf6d02f | 9 months ago |
![]() |
84e26038d4 | 9 months ago |
![]() |
4de94b9e16 | 9 months ago |
![]() |
88a99c87b6 | 9 months ago |
![]() |
09f815ad52 | 9 months ago |
![]() |
b7098d46b5 | 9 months ago |
![]() |
1c51c520f7 | 9 months ago |
![]() |
9d7ded6419 | 9 months ago |
![]() |
4392c4680c | 9 months ago |
![]() |
377e85a179 | 9 months ago |
![]() |
03e85ea99d | 9 months ago |
![]() |
792f1e64f6 | 9 months ago |
![]() |
19c90e405b | 9 months ago |
![]() |
e831c80e8b | 9 months ago |
![]() |
0e722f2f3c | 9 months ago |
![]() |
47c598783c | 9 months ago |
![]() |
35d9cbaf96 | 9 months ago |
![]() |
2ad3873f0d | 9 months ago |
![]() |
2f2dda3a7e | 9 months ago |
![]() |
fbcc299bd8 | 9 months ago |
![]() |
48cceec1dd | 9 months ago |
![]() |
a9efb4b8d7 | 9 months ago |
![]() |
f980df734c | 9 months ago |
![]() |
91a670a4f7 | 9 months ago |
![]() |
b095fd3fa9 | 9 months ago |
![]() |
0730d5a966 | 9 months ago |
![]() |
cc8d844152 | 9 months ago |
![]() |
eb5bdbfa70 | 9 months ago |
![]() |
c54ddfba0f | 9 months ago |
![]() |
088add9567 | 9 months ago |
![]() |
de015e9307 | 9 months ago |
![]() |
61bdf15fc7 | 9 months ago |
![]() |
1eaca74bc2 | 9 months ago |
![]() |
92feb5654c | 9 months ago |
![]() |
698beb9a49 | 9 months ago |
![]() |
15591940ff | 9 months ago |
![]() |
6636021206 | 9 months ago |
![]() |
eaee21bf71 | 9 months ago |
![]() |
5ca095cbcd | 9 months ago |
![]() |
c2da0b5ea2 | 9 months ago |
![]() |
c1d71d0d9f | 9 months ago |
![]() |
661c9a1d02 | 9 months ago |
![]() |
568f080518 | 9 months ago |
![]() |
904a19ee93 | 9 months ago |
![]() |
52414d64ca | 9 months ago |
![]() |
2269065ad6 | 9 months ago |
![]() |
a5e264d74b | 9 months ago |
![]() |
b84fda7388 | 9 months ago |
![]() |
5fccabac27 | 9 months ago |
![]() |
21f40e75df | 9 months ago |
![]() |
b3febedbeb | 9 months ago |
![]() |
295fbb3ae3 | 9 months ago |
![]() |
35f9a306e6 | 9 months ago |
![]() |
9d6254069c | 9 months ago |
![]() |
b532556d0a | 9 months ago |
![]() |
cf11b40ac4 | 9 months ago |
![]() |
40999467f7 | 9 months ago |
![]() |
8ac5b6d96a | 9 months ago |
![]() |
69b03f84f8 | 9 months ago |
![]() |
9e68747f96 | 9 months ago |
![]() |
ba8e9eb2c8 | 9 months ago |
![]() |
20fbbd9249 | 9 months ago |
![]() |
81f46ac573 | 9 months ago |
![]() |
63e0c5748c | 9 months ago |
![]() |
efa2339502 | 9 months ago |
![]() |
58493923e9 | 9 months ago |
![]() |
30ba233d4c | 9 months ago |
![]() |
836e06d246 | 9 months ago |
![]() |
94389b225d | 9 months ago |
![]() |
9652bca1bd | 9 months ago |
![]() |
538d37671a | 9 months ago |
![]() |
2da7bcca16 | 9 months ago |
![]() |
eda0e415d2 | 9 months ago |
![]() |
20c3c9b433 | 9 months ago |
![]() |
635ae31f68 | 9 months ago |
![]() |
5367585219 | 9 months ago |
![]() |
308936619c | 9 months ago |
![]() |
5be7e97886 | 9 months ago |
![]() |
b4c1c408c6 | 9 months ago |
![]() |
23d829a342 | 9 months ago |
![]() |
0ce1f48bf1 | 9 months ago |
![]() |
ecef42c3ad | 9 months ago |
![]() |
a83da3717d | 9 months ago |
![]() |
9d376c4dae | 9 months ago |
![]() |
5336bf57a7 | 9 months ago |
![]() |
9bf14be775 | 9 months ago |
![]() |
cebbd33b1c | 9 months ago |
![]() |
069cbece9d | 9 months ago |
![]() |
f659e64394 | 9 months ago |
![]() |
7d3d658f4c | 9 months ago |
![]() |
98eac0e6ba | 9 months ago |
![]() |
6e07e4bc7e | 9 months ago |
![]() |
aee6b9b88c | 9 months ago |
![]() |
578a82e497 | 9 months ago |
![]() |
497bbbbd73 | 9 months ago |
![]() |
7b71643cc9 | 9 months ago |
![]() |
66cc64ff66 | 10 months ago |
![]() |
a006ce2b27 | 10 months ago |
![]() |
5d0395498d | 10 months ago |
![]() |
fe371dcf0b | 10 months ago |
![]() |
d3d81cc98f | 10 months ago |
![]() |
99c99c7185 | 10 months ago |
![]() |
c6ef553792 | 10 months ago |
![]() |
69dbfe01c4 | 10 months ago |
![]() |
2301b5c1b7 | 10 months ago |
![]() |
77bff23ee9 | 10 months ago |
![]() |
7237c8dca0 | 10 months ago |
![]() |
30ea88591b | 10 months ago |
![]() |
630a55df8d | 10 months ago |
![]() |
bae4834245 | 10 months ago |
![]() |
099fb1b35c | 10 months ago |
![]() |
4b3a6ef1b3 | 10 months ago |
![]() |
665876034c | 10 months ago |
![]() |
b9f2bc2dbe | 10 months ago |
![]() |
c2d8ee0000 | 10 months ago |
![]() |
56b3dc0335 | 10 months ago |
![]() |
d7aee8e310 | 10 months ago |
![]() |
59e92b1f18 | 10 months ago |
![]() |
1be0a96a4d | 10 months ago |
![]() |
fcd6a76adc | 10 months ago |
![]() |
7cccab79e7 | 10 months ago |
![]() |
ed71189781 | 10 months ago |
![]() |
a0de8bb860 | 10 months ago |
![]() |
876b70c8ed | 11 months ago |
![]() |
339c339fec | 11 months ago |
![]() |
dab87ca236 | 11 months ago |
![]() |
378ae9f9fb | 11 months ago |
![]() |
db7b054a61 | 11 months ago |
![]() |
db97438940 | 11 months ago |
![]() |
b9de629d78 | 11 months ago |
![]() |
a854fbec56 | 11 months ago |
![]() |
30b29f3715 | 11 months ago |
![]() |
6d6081dda1 | 11 months ago |
![]() |
6014355c61 | 11 months ago |
![]() |
f73c118035 | 11 months ago |
![]() |
546b2c28a1 | 11 months ago |
![]() |
6148833f5c | 11 months ago |
![]() |
8cb7fc44db | 11 months ago |
![]() |
3f7965105d | 11 months ago |
![]() |
de20687ee6 | 11 months ago |
![]() |
b09bd0c196 | 11 months ago |
![]() |
127a224606 | 11 months ago |
![]() |
86eeb044c2 | 11 months ago |
![]() |
9a04113dfb | 11 months ago |
![]() |
ba06d77a31 | 11 months ago |
![]() |
4bf912282a | 11 months ago |
![]() |
a15fcd299e | 11 months ago |
![]() |
c03a58ec99 | 11 months ago |
![]() |
bbeacff7fc | 11 months ago |
![]() |
dae349da97 | 11 months ago |
![]() |
95abea9a03 | 11 months ago |
![]() |
550e65410a | 11 months ago |
![]() |
39837ae319 | 11 months ago |
![]() |
86aea0d3a2 | 11 months ago |
![]() |
11de6fec9c | 11 months ago |
![]() |
a250b24733 | 11 months ago |
![]() |
25b6e8f946 | 11 months ago |
![]() |
e705738338 | 11 months ago |
![]() |
62b5c94cad | 11 months ago |
![]() |
e0c4db04dc | 11 months ago |
![]() |
81b4712bca | 11 months ago |
![]() |
994f7ef8e6 | 11 months ago |
![]() |
a264433c9f | 11 months ago |
![]() |
9f66247289 | 11 months ago |
![]() |
e57eb98222 | 11 months ago |
![]() |
9b16762f48 | 11 months ago |
![]() |
65cfa2b057 | 11 months ago |
![]() |
f4ea501551 | 11 months ago |
![]() |
af86873218 | 11 months ago |
![]() |
75dc8e673b | 11 months ago |
![]() |
71baa490eb | 11 months ago |
![]() |
613dbce177 | 12 months ago |
![]() |
bb5d84c9d2 | 12 months ago |
![]() |
1d3d579c21 | 12 months ago |
![]() |
42ded0a429 | 12 months ago |
![]() |
6c5211cebe | 12 months ago |
![]() |
2b029ca0a9 | 12 months ago |
![]() |
131d132da5 | 12 months ago |
![]() |
3d2623a898 | 12 months ago |
![]() |
227bf1a33b | 12 months ago |
![]() |
c365dba843 | 12 months ago |
![]() |
1b392f905d | 12 months ago |
![]() |
1ba6fe9db5 | 12 months ago |
![]() |
1bcb9fe871 | 12 months ago |
![]() |
8a4cd12c8f | 12 months ago |
![]() |
2cfe221fbb | 12 months ago |
![]() |
2af4eeb772 | 12 months ago |
![]() |
325191d0c9 | 12 months ago |
![]() |
bdd0b75e3f | 12 months ago |
![]() |
92315c0377 | 12 months ago |
![]() |
b03fa78345 | 12 months ago |
![]() |
cc0619f62d | 12 months ago |
![]() |
b532a34810 | 12 months ago |
![]() |
3121512228 | 12 months ago |
![]() |
f8b4bcc0a7 | 12 months ago |
![]() |
1ceb657bdd | 12 months ago |
![]() |
ad8902f616 | 12 months ago |
![]() |
94ed638a43 | 12 months ago |
![]() |
bc344cd456 | 12 months ago |
![]() |
906c0bdcd8 | 12 months ago |
![]() |
337734d4a8 | 12 months ago |
![]() |
fa44802809 | 12 months ago |
![]() |
47bcd43724 | 12 months ago |
![]() |
662ef1e910 | 12 months ago |
![]() |
6355b5f1e1 | 12 months ago |
![]() |
90db9a3c00 | 12 months ago |
![]() |
49296437a8 | 12 months ago |
![]() |
1cffd621cb | 12 months ago |
![]() |
3b7f5300c5 | 12 months ago |
![]() |
4dc4d8473c | 12 months ago |
![]() |
8776349ef6 | 12 months ago |
![]() |
af1fd12f67 | 1 year ago |
![]() |
fcbc9ed760 | 1 year ago |
![]() |
a2be9781fb | 1 year ago |
![]() |
8f05fbae2a | 1 year ago |
![]() |
5b4b92769a | 1 year ago |
![]() |
91302ed349 | 1 year ago |
![]() |
f393bbe724 | 1 year ago |
![]() |
8a8af356e3 | 1 year ago |
![]() |
d949c10c45 | 1 year ago |
![]() |
ef8509c300 | 1 year ago |
![]() |
5e16cf92eb | 1 year ago |
![]() |
f0a1ff1181 | 1 year ago |
![]() |
58786a10f2 | 1 year ago |
![]() |
e59e20744e | 1 year ago |
![]() |
89bed01374 | 1 year ago |
![]() |
de4cf77ec1 | 1 year ago |
![]() |
812cdfa06c | 1 year ago |
![]() |
cd810afe2a | 1 year ago |
![]() |
b4e0d75848 | 1 year ago |
![]() |
71dc18fa29 | 1 year ago |
![]() |
98cb1eda7a | 1 year ago |
![]() |
774aa09dd6 | 1 year ago |
![]() |
f2ff0f6f19 | 1 year ago |
![]() |
5fd8367496 | 1 year ago |
![]() |
0dff8e4d1e | 1 year ago |
![]() |
1e75d97db2 | 1 year ago |
![]() |
81ca451480 | 1 year ago |
![]() |
a4486bfc1d | 1 year ago |
![]() |
3f756c8c40 | 1 year ago |
![]() |
7f9c6a63b1 | 1 year ago |
![]() |
db22142f6f | 1 year ago |
![]() |
d7cd97e8d8 | 1 year ago |
File diff suppressed because one or more lines are too long
Before Width: | Height: | Size: 24 KiB After Width: | Height: | Size: 15 KiB |
@ -1,20 +0,0 @@
|
|||||||
name: Potential Duplicates
|
|
||||||
on:
|
|
||||||
issues:
|
|
||||||
types: [opened, edited]
|
|
||||||
|
|
||||||
jobs:
|
|
||||||
run:
|
|
||||||
runs-on: ubuntu-latest
|
|
||||||
steps:
|
|
||||||
- uses: wow-actions/potential-duplicates@v1
|
|
||||||
with:
|
|
||||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
|
||||||
label: potential-duplicate
|
|
||||||
state: all
|
|
||||||
threshold: 0.3
|
|
||||||
comment: |
|
|
||||||
This issue is potentially a duplicate of one of the following issues:
|
|
||||||
{{#issues}}
|
|
||||||
- #{{ number }} ({{ accuracy }}%)
|
|
||||||
{{/issues}}
|
|
@ -1,97 +0,0 @@
|
|||||||
name: Publish
|
|
||||||
on:
|
|
||||||
workflow_call:
|
|
||||||
inputs:
|
|
||||||
channel:
|
|
||||||
default: stable
|
|
||||||
required: true
|
|
||||||
type: string
|
|
||||||
version:
|
|
||||||
required: true
|
|
||||||
type: string
|
|
||||||
target_commitish:
|
|
||||||
required: true
|
|
||||||
type: string
|
|
||||||
prerelease:
|
|
||||||
default: false
|
|
||||||
required: true
|
|
||||||
type: boolean
|
|
||||||
secrets:
|
|
||||||
ARCHIVE_REPO_TOKEN:
|
|
||||||
required: false
|
|
||||||
|
|
||||||
permissions:
|
|
||||||
contents: write
|
|
||||||
|
|
||||||
jobs:
|
|
||||||
publish:
|
|
||||||
runs-on: ubuntu-latest
|
|
||||||
|
|
||||||
steps:
|
|
||||||
- uses: actions/checkout@v3
|
|
||||||
with:
|
|
||||||
fetch-depth: 0
|
|
||||||
- uses: actions/download-artifact@v3
|
|
||||||
- uses: actions/setup-python@v4
|
|
||||||
with:
|
|
||||||
python-version: "3.10"
|
|
||||||
|
|
||||||
- name: Generate release notes
|
|
||||||
run: |
|
|
||||||
printf '%s' \
|
|
||||||
'[![Installation](https://img.shields.io/badge/-Which%20file%20should%20I%20download%3F-white.svg?style=for-the-badge)]' \
|
|
||||||
'(https://github.com/yt-dlp/yt-dlp#installation "Installation instructions") ' \
|
|
||||||
'[![Documentation](https://img.shields.io/badge/-Docs-brightgreen.svg?style=for-the-badge&logo=GitBook&labelColor=555555)]' \
|
|
||||||
'(https://github.com/yt-dlp/yt-dlp/tree/2023.03.04#readme "Documentation") ' \
|
|
||||||
'[![Donate](https://img.shields.io/badge/_-Donate-red.svg?logo=githubsponsors&labelColor=555555&style=for-the-badge)]' \
|
|
||||||
'(https://github.com/yt-dlp/yt-dlp/blob/master/Collaborators.md#collaborators "Donate") ' \
|
|
||||||
'[![Discord](https://img.shields.io/discord/807245652072857610?color=blue&labelColor=555555&label=&logo=discord&style=for-the-badge)]' \
|
|
||||||
'(https://discord.gg/H5MNcFW63r "Discord") ' \
|
|
||||||
${{ inputs.channel != 'nightly' && '"[![Nightly](https://img.shields.io/badge/Get%20nightly%20builds-purple.svg?style=for-the-badge)]" \
|
|
||||||
"(https://github.com/yt-dlp/yt-dlp-nightly-builds/releases/latest \"Nightly builds\")"' || '' }} \
|
|
||||||
> ./RELEASE_NOTES
|
|
||||||
printf '\n\n' >> ./RELEASE_NOTES
|
|
||||||
cat >> ./RELEASE_NOTES << EOF
|
|
||||||
#### A description of the various files are in the [README](https://github.com/yt-dlp/yt-dlp#release-files)
|
|
||||||
---
|
|
||||||
$(python ./devscripts/make_changelog.py -vv --collapsible)
|
|
||||||
EOF
|
|
||||||
printf '%s\n\n' '**This is an automated nightly pre-release build**' >> ./NIGHTLY_NOTES
|
|
||||||
cat ./RELEASE_NOTES >> ./NIGHTLY_NOTES
|
|
||||||
printf '%s\n\n' 'Generated from: https://github.com/${{ github.repository }}/commit/${{ inputs.target_commitish }}' >> ./ARCHIVE_NOTES
|
|
||||||
cat ./RELEASE_NOTES >> ./ARCHIVE_NOTES
|
|
||||||
|
|
||||||
- name: Archive nightly release
|
|
||||||
env:
|
|
||||||
GH_TOKEN: ${{ secrets.ARCHIVE_REPO_TOKEN }}
|
|
||||||
GH_REPO: ${{ vars.ARCHIVE_REPO }}
|
|
||||||
if: |
|
|
||||||
inputs.channel == 'nightly' && env.GH_TOKEN != '' && env.GH_REPO != ''
|
|
||||||
run: |
|
|
||||||
gh release create \
|
|
||||||
--notes-file ARCHIVE_NOTES \
|
|
||||||
--title "yt-dlp nightly ${{ inputs.version }}" \
|
|
||||||
${{ inputs.version }} \
|
|
||||||
artifact/*
|
|
||||||
|
|
||||||
- name: Prune old nightly release
|
|
||||||
if: inputs.channel == 'nightly' && !vars.ARCHIVE_REPO
|
|
||||||
env:
|
|
||||||
GH_TOKEN: ${{ github.token }}
|
|
||||||
run: |
|
|
||||||
gh release delete --yes --cleanup-tag "nightly" || true
|
|
||||||
git tag --delete "nightly" || true
|
|
||||||
sleep 5 # Enough time to cover deletion race condition
|
|
||||||
|
|
||||||
- name: Publish release${{ inputs.channel == 'nightly' && ' (nightly)' || '' }}
|
|
||||||
env:
|
|
||||||
GH_TOKEN: ${{ github.token }}
|
|
||||||
if: (inputs.channel == 'nightly' && !vars.ARCHIVE_REPO) || inputs.channel != 'nightly'
|
|
||||||
run: |
|
|
||||||
gh release create \
|
|
||||||
--notes-file ${{ inputs.channel == 'nightly' && 'NIGHTLY_NOTES' || 'RELEASE_NOTES' }} \
|
|
||||||
--target ${{ inputs.target_commitish }} \
|
|
||||||
--title "yt-dlp ${{ inputs.channel == 'nightly' && 'nightly ' || '' }}${{ inputs.version }}" \
|
|
||||||
${{ inputs.prerelease && '--prerelease' || '' }} \
|
|
||||||
${{ inputs.channel == 'nightly' && '"nightly"' || inputs.version }} \
|
|
||||||
artifact/*
|
|
@ -0,0 +1,30 @@
|
|||||||
|
name: Release (master)
|
||||||
|
on:
|
||||||
|
push:
|
||||||
|
branches:
|
||||||
|
- master
|
||||||
|
paths:
|
||||||
|
- "yt_dlp/**.py"
|
||||||
|
- "!yt_dlp/version.py"
|
||||||
|
- "bundle/*.py"
|
||||||
|
- "pyproject.toml"
|
||||||
|
- "Makefile"
|
||||||
|
- ".github/workflows/build.yml"
|
||||||
|
concurrency:
|
||||||
|
group: release-master
|
||||||
|
permissions:
|
||||||
|
contents: read
|
||||||
|
|
||||||
|
jobs:
|
||||||
|
release:
|
||||||
|
if: vars.BUILD_MASTER != ''
|
||||||
|
uses: ./.github/workflows/release.yml
|
||||||
|
with:
|
||||||
|
prerelease: true
|
||||||
|
source: master
|
||||||
|
permissions:
|
||||||
|
contents: write
|
||||||
|
packages: write # For package cache
|
||||||
|
actions: write # For cleaning up cache
|
||||||
|
id-token: write # mandatory for trusted publishing
|
||||||
|
secrets: inherit
|
@ -1,52 +1,43 @@
|
|||||||
name: Release (nightly)
|
name: Release (nightly)
|
||||||
on:
|
on:
|
||||||
push:
|
schedule:
|
||||||
branches:
|
- cron: '23 23 * * *'
|
||||||
- master
|
|
||||||
paths:
|
|
||||||
- "yt_dlp/**.py"
|
|
||||||
- "!yt_dlp/version.py"
|
|
||||||
concurrency:
|
|
||||||
group: release-nightly
|
|
||||||
cancel-in-progress: true
|
|
||||||
permissions:
|
permissions:
|
||||||
contents: read
|
contents: read
|
||||||
|
|
||||||
jobs:
|
jobs:
|
||||||
prepare:
|
check_nightly:
|
||||||
if: vars.BUILD_NIGHTLY != ''
|
if: vars.BUILD_NIGHTLY != ''
|
||||||
runs-on: ubuntu-latest
|
runs-on: ubuntu-latest
|
||||||
outputs:
|
outputs:
|
||||||
version: ${{ steps.get_version.outputs.version }}
|
commit: ${{ steps.check_for_new_commits.outputs.commit }}
|
||||||
|
|
||||||
steps:
|
steps:
|
||||||
- uses: actions/checkout@v3
|
- uses: actions/checkout@v4
|
||||||
- name: Get version
|
with:
|
||||||
id: get_version
|
fetch-depth: 0
|
||||||
|
- name: Check for new commits
|
||||||
|
id: check_for_new_commits
|
||||||
run: |
|
run: |
|
||||||
python devscripts/update-version.py "$(date -u +"%H%M%S")" | grep -Po "version=\d+(\.\d+){3}" >> "$GITHUB_OUTPUT"
|
relevant_files=(
|
||||||
|
"yt_dlp/*.py"
|
||||||
|
':!yt_dlp/version.py'
|
||||||
|
"bundle/*.py"
|
||||||
|
"pyproject.toml"
|
||||||
|
"Makefile"
|
||||||
|
".github/workflows/build.yml"
|
||||||
|
)
|
||||||
|
echo "commit=$(git log --format=%H -1 --since="24 hours ago" -- "${relevant_files[@]}")" | tee "$GITHUB_OUTPUT"
|
||||||
|
|
||||||
build:
|
release:
|
||||||
needs: prepare
|
needs: [check_nightly]
|
||||||
uses: ./.github/workflows/build.yml
|
if: ${{ needs.check_nightly.outputs.commit }}
|
||||||
|
uses: ./.github/workflows/release.yml
|
||||||
with:
|
with:
|
||||||
version: ${{ needs.prepare.outputs.version }}
|
prerelease: true
|
||||||
channel: nightly
|
source: nightly
|
||||||
permissions:
|
|
||||||
contents: read
|
|
||||||
packages: write # For package cache
|
|
||||||
secrets:
|
|
||||||
GPG_SIGNING_KEY: ${{ secrets.GPG_SIGNING_KEY }}
|
|
||||||
|
|
||||||
publish:
|
|
||||||
needs: [prepare, build]
|
|
||||||
uses: ./.github/workflows/publish.yml
|
|
||||||
secrets:
|
|
||||||
ARCHIVE_REPO_TOKEN: ${{ secrets.ARCHIVE_REPO_TOKEN }}
|
|
||||||
permissions:
|
permissions:
|
||||||
contents: write
|
contents: write
|
||||||
with:
|
packages: write # For package cache
|
||||||
channel: nightly
|
actions: write # For cleaning up cache
|
||||||
prerelease: true
|
id-token: write # mandatory for trusted publishing
|
||||||
version: ${{ needs.prepare.outputs.version }}
|
secrets: inherit
|
||||||
target_commitish: ${{ github.sha }}
|
|
||||||
|
@ -0,0 +1,14 @@
|
|||||||
|
repos:
|
||||||
|
- repo: local
|
||||||
|
hooks:
|
||||||
|
- id: linter
|
||||||
|
name: Apply linter fixes
|
||||||
|
entry: ruff check --fix .
|
||||||
|
language: system
|
||||||
|
types: [python]
|
||||||
|
require_serial: true
|
||||||
|
- id: format
|
||||||
|
name: Apply formatting fixes
|
||||||
|
entry: autopep8 --in-place .
|
||||||
|
language: system
|
||||||
|
types: [python]
|
@ -0,0 +1,9 @@
|
|||||||
|
repos:
|
||||||
|
- repo: local
|
||||||
|
hooks:
|
||||||
|
- id: fix
|
||||||
|
name: Apply code fixes
|
||||||
|
entry: hatch fmt
|
||||||
|
language: system
|
||||||
|
types: [python]
|
||||||
|
require_serial: true
|
@ -1,10 +0,0 @@
|
|||||||
include AUTHORS
|
|
||||||
include Changelog.md
|
|
||||||
include LICENSE
|
|
||||||
include README.md
|
|
||||||
include completions/*/*
|
|
||||||
include supportedsites.md
|
|
||||||
include yt-dlp.1
|
|
||||||
include requirements.txt
|
|
||||||
recursive-include devscripts *
|
|
||||||
recursive-include test *
|
|
@ -0,0 +1,10 @@
|
|||||||
|
services:
|
||||||
|
static:
|
||||||
|
build: static
|
||||||
|
environment:
|
||||||
|
channel: ${channel}
|
||||||
|
origin: ${origin}
|
||||||
|
version: ${version}
|
||||||
|
volumes:
|
||||||
|
- ~/build:/build
|
||||||
|
- ../..:/yt-dlp
|
@ -0,0 +1,21 @@
|
|||||||
|
# Builder image for the fully-static Linux yt-dlp executable
# (pyinstaller one-file build, then staticx to bundle the loader/libc).
FROM alpine:3.19 AS base

RUN apk --update add --no-cache \
    build-base \
    python3 \
    pipx \
    ;

RUN pipx install pyinstaller
# Requires above step to prepare the shared venv
RUN ~/.local/share/pipx/shared/bin/python -m pip install -U wheel
RUN apk --update add --no-cache \
    scons \
    patchelf \
    binutils \
    ;
RUN pipx install staticx

WORKDIR /yt-dlp
COPY entrypoint.sh /entrypoint.sh
# Exec form so the entrypoint runs as PID 1 and receives signals directly,
# instead of being wrapped in `/bin/sh -c` (shell form).
ENTRYPOINT ["/entrypoint.sh"]
|
@ -0,0 +1,13 @@
|
|||||||
|
#!/bin/ash
# Container entrypoint: build the linux binary, then make it fully static.
set -e

# Stage 1: produce the pyinstaller one-file build inside its pipx venv
source ~/.local/share/pipx/venvs/pyinstaller/bin/activate
python -m devscripts.install_deps --include secretstorage
python -m devscripts.make_lazy_extractors
python devscripts/update-version.py -c "${channel}" -r "${origin}" "${version}"
python -m bundle.pyinstaller
deactivate

# Stage 2: bundle the dynamic loader/libs so the result runs on any distro
source ~/.local/share/pipx/venvs/staticx/bin/activate
staticx /yt-dlp/dist/yt-dlp_linux /build/yt-dlp_linux
deactivate
|
@ -0,0 +1,59 @@
|
|||||||
|
#!/usr/bin/env python3

# Allow execution from anywhere
import os
import sys

sys.path.insert(0, os.path.dirname(os.path.dirname(os.path.abspath(__file__))))

import warnings

from py2exe import freeze

from devscripts.utils import read_version

VERSION = read_version()


def main():
    """Build the Windows `yt-dlp.exe` via py2exe (pyinstaller is preferred)."""
    # py2exe cannot bundle pycryptodomex and the result requires VC++14;
    # warn so users reach for the pyinstaller bundle instead.
    warnings.warn(
        'py2exe builds do not support pycryptodomex and needs VC++14 to run. '
        'It is recommended to run "pyinst.py" to build using pyinstaller instead')

    # Entry script and exe metadata for the console build
    console_spec = [{
        'script': './yt_dlp/__main__.py',
        'dest_base': 'yt-dlp',
        'icon_resources': [(1, 'devscripts/logo.ico')],
    }]

    # Version block embedded into the executable's resources
    version_info = {
        'version': VERSION,
        'description': 'A feature-rich command-line audio/video downloader',
        'comments': 'Official repository: <https://github.com/yt-dlp/yt-dlp>',
        'product_name': 'yt-dlp',
        'product_version': VERSION,
    }

    build_options = {
        'bundle_files': 0,
        'compressed': 1,
        'optimize': 2,
        'dist_dir': './dist',
        'excludes': [
            # py2exe cannot import Crypto
            'Crypto',
            'Cryptodome',
            # requests >=2.32.0 breaks py2exe builds due to certifi dependency
            'requests',
            'urllib3',
        ],
        'dll_excludes': ['w9xpopen.exe', 'crypt32.dll'],
        # Modules that are only imported dynamically must be added here
        'includes': ['yt_dlp.compat._legacy', 'yt_dlp.compat._deprecated',
                     'yt_dlp.utils._legacy', 'yt_dlp.utils._deprecated'],
    }

    freeze(
        console=console_spec,
        version_info=version_info,
        options=build_options,
        zipfile=None,
    )


if __name__ == '__main__':
    main()
|
Binary file not shown.
Binary file not shown.
@ -1 +0,0 @@
|
|||||||
# Empty file needed to make devscripts.utils properly importable from outside
|
|
@ -0,0 +1,81 @@
|
|||||||
|
#!/usr/bin/env python3

# Allow execution from anywhere
import os
import sys

sys.path.insert(0, os.path.dirname(os.path.dirname(os.path.abspath(__file__))))

import argparse
import re
import subprocess

from pathlib import Path

from devscripts.tomlparse import parse_toml
from devscripts.utils import read_file


def parse_args():
    """Define and evaluate the command-line interface for this script."""
    parser = argparse.ArgumentParser(description='Install dependencies for yt-dlp')
    parser.add_argument(
        'input', nargs='?', metavar='TOMLFILE', default=Path(__file__).parent.parent / 'pyproject.toml',
        help='input file (default: %(default)s)')
    parser.add_argument(
        '-e', '--exclude', metavar='DEPENDENCY', action='append',
        help='exclude a dependency')
    parser.add_argument(
        '-i', '--include', metavar='GROUP', action='append',
        help='include an optional dependency group')
    parser.add_argument(
        '-o', '--only-optional', action='store_true',
        help='only install optional dependencies')
    parser.add_argument(
        '-p', '--print', action='store_true',
        help='only print requirements to stdout')
    parser.add_argument(
        '-u', '--user', action='store_true',
        help='install with pip as --user')
    return parser.parse_args()


def main():
    """Collect the requested dependency specs and install (or print) them."""
    args = parse_args()
    project = parse_toml(read_file(args.input))['project']
    # Matches self-referencing specs such as `yt-dlp[default]`, which stand in
    # for an entire optional-dependency group
    recursive_pattern = re.compile(rf'{project["name"]}\[(?P<group_name>[\w-]+)\]')
    optional_groups = project['optional-dependencies']
    excludes = args.exclude or []

    def expand_group(group):
        # Flatten `name[group]` entries into the group's member dependencies
        for dep in group:
            mobj = recursive_pattern.fullmatch(dep)
            if mobj:
                yield from optional_groups.get(mobj.group('group_name'), [])
            else:
                yield dep

    targets = []
    if not args.only_optional:  # `-o` should exclude 'dependencies' and the 'default' group
        targets.extend(project['dependencies'])
        if 'default' not in excludes:  # `--exclude default` should exclude entire 'default' group
            targets.extend(expand_group(optional_groups['default']))

    for group in filter(None, map(optional_groups.get, args.include or [])):
        targets.extend(expand_group(group))

    # Drop any spec whose (normalized) package name was excluded on the CLI
    targets = [t for t in targets if re.match(r'[\w-]+', t).group(0).lower() not in excludes]

    if args.print:
        for target in targets:
            print(target)
        return

    pip_args = [sys.executable, '-m', 'pip', 'install', '-U']
    if args.user:
        pip_args.append('--user')
    pip_args.extend(targets)

    return subprocess.call(pip_args)


if __name__ == '__main__':
    sys.exit(main())
|
@ -1,17 +0,0 @@
|
|||||||
@setlocal
|
|
||||||
@echo off
|
|
||||||
cd /d %~dp0..
|
|
||||||
|
|
||||||
if ["%~1"]==[""] (
|
|
||||||
set "test_set="test""
|
|
||||||
) else if ["%~1"]==["core"] (
|
|
||||||
set "test_set="-m not download""
|
|
||||||
) else if ["%~1"]==["download"] (
|
|
||||||
set "test_set="-m "download""
|
|
||||||
) else (
|
|
||||||
echo.Invalid test type "%~1". Use "core" ^| "download"
|
|
||||||
exit /b 1
|
|
||||||
)
|
|
||||||
|
|
||||||
set PYTHONWARNINGS=error
|
|
||||||
pytest %test_set%
|
|
@ -0,0 +1,75 @@
|
|||||||
|
#!/usr/bin/env python3

import argparse
import functools
import os
import re
import shlex
import subprocess
import sys
from pathlib import Path


# Strips the `IE` suffix from extractor test names,
# e.g. `YoutubeIE` -> `Youtube`, `YoutubeIE_all` -> `Youtube_all`
fix_test_name = functools.partial(re.compile(r'IE(_all|_\d+)?$').sub, r'\1')


def parse_args():
    """Define and evaluate the command-line interface for this script."""
    parser = argparse.ArgumentParser(description='Run selected yt-dlp tests')
    parser.add_argument(
        'test', help='a extractor tests, or one of "core" or "download"', nargs='*')
    parser.add_argument(
        '-k', help='run a test matching EXPRESSION. Same as "pytest -k"', metavar='EXPRESSION')
    parser.add_argument(
        '--pytest-args', help='arguments to passthrough to pytest')
    return parser.parse_args()


def run_tests(*tests, pattern=None, ci=False, pytest_args=None):
    """Run the requested tests and return the test runner's exit code.

    tests: extractor test names and/or the special values "core"/"download"
    pattern: expression forwarded to `pytest -k` (or `unittest -k`)
    ci: force colored pytest output (set when running under CI)
    pytest_args: extra arguments passed through to pytest; defaults to
        the HATCH_TEST_ARGS environment variable

    BUGFIX: this previously read the module-global `args`, which is only
    bound when the script runs as __main__, so any programmatic caller hit
    a NameError. The value is now an explicit keyword-only parameter.
    """
    run_core = 'core' in tests or (not pattern and not tests)
    run_download = 'download' in tests
    tests = list(map(fix_test_name, tests))

    pytest_args = pytest_args or os.getenv('HATCH_TEST_ARGS', '')
    arguments = ['pytest', '-Werror', '--tb=short', *shlex.split(pytest_args)]
    if ci:
        arguments.append('--color=yes')
    if pattern:
        arguments.extend(['-k', pattern])
    if run_core:
        arguments.extend(['-m', 'not download'])
    elif run_download:
        arguments.extend(['-m', 'download'])
    else:
        arguments.extend(
            f'test/test_download.py::TestDownload::test_{test}' for test in tests)

    print(f'Running {arguments}', flush=True)
    try:
        return subprocess.call(arguments)
    except FileNotFoundError:
        pass  # pytest is not installed; fall back to the stdlib runner below

    arguments = [sys.executable, '-Werror', '-m', 'unittest']
    if pattern:
        arguments.extend(['-k', pattern])
    if run_core:
        # Core tests rely on pytest markers, which unittest cannot evaluate
        print('"pytest" needs to be installed to run core tests', file=sys.stderr, flush=True)
        return 1
    elif run_download:
        arguments.append('test.test_download')
    else:
        arguments.extend(
            f'test.test_download.TestDownload.test_{test}' for test in tests)

    print(f'Running {arguments}', flush=True)
    return subprocess.call(arguments)


if __name__ == '__main__':
    try:
        args = parse_args()

        os.chdir(Path(__file__).parent.parent)
        sys.exit(run_tests(
            *args.test, pattern=args.k, ci=bool(os.getenv('CI')),
            pytest_args=args.pytest_args))
    except KeyboardInterrupt:
        pass
|
@ -1,14 +0,0 @@
|
|||||||
#!/usr/bin/env sh
|
|
||||||
|
|
||||||
if [ -z "$1" ]; then
|
|
||||||
test_set='test'
|
|
||||||
elif [ "$1" = 'core' ]; then
|
|
||||||
test_set="-m not download"
|
|
||||||
elif [ "$1" = 'download' ]; then
|
|
||||||
test_set="-m download"
|
|
||||||
else
|
|
||||||
echo 'Invalid test type "'"$1"'". Use "core" | "download"'
|
|
||||||
exit 1
|
|
||||||
fi
|
|
||||||
|
|
||||||
python3 -bb -Werror -m pytest "$test_set"
|
|
@ -0,0 +1,189 @@
|
|||||||
|
#!/usr/bin/env python3
|
||||||
|
|
||||||
|
"""
|
||||||
|
Simple parser for spec compliant toml files
|
||||||
|
|
||||||
|
A simple toml parser for files that comply with the spec.
|
||||||
|
Should only be used to parse `pyproject.toml` for `install_deps.py`.
|
||||||
|
|
||||||
|
IMPORTANT: INVALID FILES OR MULTILINE STRINGS ARE NOT SUPPORTED!
|
||||||
|
"""
|
||||||
|
|
||||||
|
from __future__ import annotations
|
||||||
|
|
||||||
|
import datetime as dt
|
||||||
|
import json
|
||||||
|
import re
|
||||||
|
|
||||||
|
WS = r'(?:[\ \t]*)'
|
||||||
|
STRING_RE = re.compile(r'"(?:\\.|[^\\"\n])*"|\'[^\'\n]*\'')
|
||||||
|
SINGLE_KEY_RE = re.compile(rf'{STRING_RE.pattern}|[A-Za-z0-9_-]+')
|
||||||
|
KEY_RE = re.compile(rf'{WS}(?:{SINGLE_KEY_RE.pattern}){WS}(?:\.{WS}(?:{SINGLE_KEY_RE.pattern}){WS})*')
|
||||||
|
EQUALS_RE = re.compile(rf'={WS}')
|
||||||
|
WS_RE = re.compile(WS)
|
||||||
|
|
||||||
|
_SUBTABLE = rf'(?P<subtable>^\[(?P<is_list>\[)?(?P<path>{KEY_RE.pattern})\]\]?)'
|
||||||
|
EXPRESSION_RE = re.compile(rf'^(?:{_SUBTABLE}|{KEY_RE.pattern}=)', re.MULTILINE)
|
||||||
|
|
||||||
|
LIST_WS_RE = re.compile(rf'{WS}((#[^\n]*)?\n{WS})*')
|
||||||
|
LEFTOVER_VALUE_RE = re.compile(r'[^,}\]\t\n#]+')
|
||||||
|
|
||||||
|
|
||||||
|
def parse_key(value: str):
|
||||||
|
for match in SINGLE_KEY_RE.finditer(value):
|
||||||
|
if match[0][0] == '"':
|
||||||
|
yield json.loads(match[0])
|
||||||
|
elif match[0][0] == '\'':
|
||||||
|
yield match[0][1:-1]
|
||||||
|
else:
|
||||||
|
yield match[0]
|
||||||
|
|
||||||
|
|
||||||
|
def get_target(root: dict, paths: list[str], is_list=False):
|
||||||
|
target = root
|
||||||
|
|
||||||
|
for index, key in enumerate(paths, 1):
|
||||||
|
use_list = is_list and index == len(paths)
|
||||||
|
result = target.get(key)
|
||||||
|
if result is None:
|
||||||
|
result = [] if use_list else {}
|
||||||
|
target[key] = result
|
||||||
|
|
||||||
|
if isinstance(result, dict):
|
||||||
|
target = result
|
||||||
|
elif use_list:
|
||||||
|
target = {}
|
||||||
|
result.append(target)
|
||||||
|
else:
|
||||||
|
target = result[-1]
|
||||||
|
|
||||||
|
assert isinstance(target, dict)
|
||||||
|
return target
|
||||||
|
|
||||||
|
|
||||||
|
def parse_enclosed(data: str, index: int, end: str, ws_re: re.Pattern):
|
||||||
|
index += 1
|
||||||
|
|
||||||
|
if match := ws_re.match(data, index):
|
||||||
|
index = match.end()
|
||||||
|
|
||||||
|
while data[index] != end:
|
||||||
|
index = yield True, index
|
||||||
|
|
||||||
|
if match := ws_re.match(data, index):
|
||||||
|
index = match.end()
|
||||||
|
|
||||||
|
if data[index] == ',':
|
||||||
|
index += 1
|
||||||
|
|
||||||
|
if match := ws_re.match(data, index):
|
||||||
|
index = match.end()
|
||||||
|
|
||||||
|
assert data[index] == end
|
||||||
|
yield False, index + 1
|
||||||
|
|
||||||
|
|
||||||
|
def parse_value(data: str, index: int):
|
||||||
|
if data[index] == '[':
|
||||||
|
result = []
|
||||||
|
|
||||||
|
indices = parse_enclosed(data, index, ']', LIST_WS_RE)
|
||||||
|
valid, index = next(indices)
|
||||||
|
while valid:
|
||||||
|
index, value = parse_value(data, index)
|
||||||
|
result.append(value)
|
||||||
|
valid, index = indices.send(index)
|
||||||
|
|
||||||
|
return index, result
|
||||||
|
|
||||||
|
if data[index] == '{':
|
||||||
|
result = {}
|
||||||
|
|
||||||
|
indices = parse_enclosed(data, index, '}', WS_RE)
|
||||||
|
valid, index = next(indices)
|
||||||
|
while valid:
|
||||||
|
valid, index = indices.send(parse_kv_pair(data, index, result))
|
||||||
|
|
||||||
|
return index, result
|
||||||
|
|
||||||
|
if match := STRING_RE.match(data, index):
|
||||||
|
return match.end(), json.loads(match[0]) if match[0][0] == '"' else match[0][1:-1]
|
||||||
|
|
||||||
|
match = LEFTOVER_VALUE_RE.match(data, index)
|
||||||
|
assert match
|
||||||
|
value = match[0].strip()
|
||||||
|
for func in [
|
||||||
|
int,
|
||||||
|
float,
|
||||||
|
dt.time.fromisoformat,
|
||||||
|
dt.date.fromisoformat,
|
||||||
|
dt.datetime.fromisoformat,
|
||||||
|
{'true': True, 'false': False}.get,
|
||||||
|
]:
|
||||||
|
try:
|
||||||
|
value = func(value)
|
||||||
|
break
|
||||||
|
except Exception:
|
||||||
|
pass
|
||||||
|
|
||||||
|
return match.end(), value
|
||||||
|
|
||||||
|
|
||||||
|
def parse_kv_pair(data: str, index: int, target: dict):
|
||||||
|
match = KEY_RE.match(data, index)
|
||||||
|
if not match:
|
||||||
|
return None
|
||||||
|
|
||||||
|
*keys, key = parse_key(match[0])
|
||||||
|
|
||||||
|
match = EQUALS_RE.match(data, match.end())
|
||||||
|
assert match
|
||||||
|
index = match.end()
|
||||||
|
|
||||||
|
index, value = parse_value(data, index)
|
||||||
|
get_target(target, keys)[key] = value
|
||||||
|
return index
|
||||||
|
|
||||||
|
|
||||||
|
def parse_toml(data: str):
|
||||||
|
root = {}
|
||||||
|
target = root
|
||||||
|
|
||||||
|
index = 0
|
||||||
|
while True:
|
||||||
|
match = EXPRESSION_RE.search(data, index)
|
||||||
|
if not match:
|
||||||
|
break
|
||||||
|
|
||||||
|
if match.group('subtable'):
|
||||||
|
index = match.end()
|
||||||
|
path, is_list = match.group('path', 'is_list')
|
||||||
|
target = get_target(root, list(parse_key(path)), bool(is_list))
|
||||||
|
continue
|
||||||
|
|
||||||
|
index = parse_kv_pair(data, match.start(), target)
|
||||||
|
assert index is not None
|
||||||
|
|
||||||
|
return root
|
||||||
|
|
||||||
|
|
||||||
|
def main():
|
||||||
|
import argparse
|
||||||
|
from pathlib import Path
|
||||||
|
|
||||||
|
parser = argparse.ArgumentParser()
|
||||||
|
parser.add_argument('infile', type=Path, help='The TOML file to read as input')
|
||||||
|
args = parser.parse_args()
|
||||||
|
|
||||||
|
with args.infile.open('r', encoding='utf-8') as file:
|
||||||
|
data = file.read()
|
||||||
|
|
||||||
|
def default(obj):
|
||||||
|
if isinstance(obj, (dt.date, dt.time, dt.datetime)):
|
||||||
|
return obj.isoformat()
|
||||||
|
|
||||||
|
print(json.dumps(parse_toml(data), default=default))
|
||||||
|
|
||||||
|
|
||||||
|
if __name__ == '__main__':
|
||||||
|
main()
|
@ -1,39 +0,0 @@
|
|||||||
#!/usr/bin/env python3
|
|
||||||
|
|
||||||
"""
|
|
||||||
Usage: python3 ./devscripts/update-formulae.py <path-to-formulae-rb> <version>
|
|
||||||
version can be either 0-aligned (yt-dlp version) or normalized (PyPi version)
|
|
||||||
"""
|
|
||||||
|
|
||||||
# Allow direct execution
|
|
||||||
import os
|
|
||||||
import sys
|
|
||||||
|
|
||||||
sys.path.insert(0, os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
|
|
||||||
|
|
||||||
|
|
||||||
import json
|
|
||||||
import re
|
|
||||||
import urllib.request
|
|
||||||
|
|
||||||
from devscripts.utils import read_file, write_file
|
|
||||||
|
|
||||||
filename, version = sys.argv[1:]
|
|
||||||
|
|
||||||
normalized_version = '.'.join(str(int(x)) for x in version.split('.'))
|
|
||||||
|
|
||||||
pypi_release = json.loads(urllib.request.urlopen(
|
|
||||||
'https://pypi.org/pypi/yt-dlp/%s/json' % normalized_version
|
|
||||||
).read().decode())
|
|
||||||
|
|
||||||
tarball_file = next(x for x in pypi_release['urls'] if x['filename'].endswith('.tar.gz'))
|
|
||||||
|
|
||||||
sha256sum = tarball_file['digests']['sha256']
|
|
||||||
url = tarball_file['url']
|
|
||||||
|
|
||||||
formulae_text = read_file(filename)
|
|
||||||
|
|
||||||
formulae_text = re.sub(r'sha256 "[0-9a-f]*?"', 'sha256 "%s"' % sha256sum, formulae_text, count=1)
|
|
||||||
formulae_text = re.sub(r'url "[^"]*?"', 'url "%s"' % url, formulae_text, count=1)
|
|
||||||
|
|
||||||
write_file(filename, formulae_text)
|
|
@ -0,0 +1,26 @@
|
|||||||
|
#!/usr/bin/env python3

# Allow direct execution
import os
import sys

sys.path.insert(0, os.path.dirname(os.path.dirname(os.path.abspath(__file__))))

from pathlib import Path

from devscripts.make_changelog import create_changelog, create_parser
from devscripts.utils import read_file, read_version, write_file

# Always run after devscripts/update-version.py, and run before `make doc|pypi-files|tar|all`


def _run():
    """Insert a changelog entry for the current version at the top of the file."""
    parser = create_parser()
    parser.description = 'Update an existing changelog file with an entry for a new release'
    parser.add_argument(
        '--changelog-path', type=Path, default=Path(__file__).parent.parent / 'Changelog.md',
        help='path to the Changelog file')
    args = parser.parse_args()
    new_entry = create_changelog(args)

    # Split at the first version heading so the new entry lands right after
    # the static file header
    header, sep, changelog = read_file(args.changelog_path).partition('\n### ')
    write_file(args.changelog_path, f'{header}{sep}{read_version()}\n{new_entry}\n{sep}{changelog}')


if __name__ == '__main__':
    _run()
|
@ -1,5 +1,383 @@
|
|||||||
[build-system]
|
[build-system]
|
||||||
build-backend = 'setuptools.build_meta'
|
requires = ["hatchling"]
|
||||||
# https://github.com/yt-dlp/yt-dlp/issues/5941
|
build-backend = "hatchling.build"
|
||||||
# https://github.com/pypa/distutils/issues/17
|
|
||||||
requires = ['setuptools > 50']
|
[project]
|
||||||
|
name = "yt-dlp"
|
||||||
|
maintainers = [
|
||||||
|
{name = "pukkandan", email = "pukkandan.ytdlp@gmail.com"},
|
||||||
|
{name = "Grub4K", email = "contact@grub4k.xyz"},
|
||||||
|
{name = "bashonly", email = "bashonly@protonmail.com"},
|
||||||
|
{name = "coletdjnz", email = "coletdjnz@protonmail.com"},
|
||||||
|
]
|
||||||
|
description = "A feature-rich command-line audio/video downloader"
|
||||||
|
readme = "README.md"
|
||||||
|
requires-python = ">=3.8"
|
||||||
|
keywords = [
|
||||||
|
"youtube-dl",
|
||||||
|
"video-downloader",
|
||||||
|
"youtube-downloader",
|
||||||
|
"sponsorblock",
|
||||||
|
"youtube-dlc",
|
||||||
|
"yt-dlp",
|
||||||
|
]
|
||||||
|
license = {file = "LICENSE"}
|
||||||
|
classifiers = [
|
||||||
|
"Topic :: Multimedia :: Video",
|
||||||
|
"Development Status :: 5 - Production/Stable",
|
||||||
|
"Environment :: Console",
|
||||||
|
"Programming Language :: Python",
|
||||||
|
"Programming Language :: Python :: 3 :: Only",
|
||||||
|
"Programming Language :: Python :: 3.8",
|
||||||
|
"Programming Language :: Python :: 3.9",
|
||||||
|
"Programming Language :: Python :: 3.10",
|
||||||
|
"Programming Language :: Python :: 3.11",
|
||||||
|
"Programming Language :: Python :: 3.12",
|
||||||
|
"Programming Language :: Python :: Implementation",
|
||||||
|
"Programming Language :: Python :: Implementation :: CPython",
|
||||||
|
"Programming Language :: Python :: Implementation :: PyPy",
|
||||||
|
"License :: OSI Approved :: The Unlicense (Unlicense)",
|
||||||
|
"Operating System :: OS Independent",
|
||||||
|
]
|
||||||
|
dynamic = ["version"]
|
||||||
|
dependencies = [
|
||||||
|
"brotli; implementation_name=='cpython'",
|
||||||
|
"brotlicffi; implementation_name!='cpython'",
|
||||||
|
"certifi",
|
||||||
|
"mutagen",
|
||||||
|
"pycryptodomex",
|
||||||
|
"requests>=2.32.2,<3",
|
||||||
|
"urllib3>=1.26.17,<3",
|
||||||
|
"websockets>=12.0",
|
||||||
|
]
|
||||||
|
|
||||||
|
[project.optional-dependencies]
|
||||||
|
default = []
|
||||||
|
curl-cffi = ["curl-cffi==0.5.10; implementation_name=='cpython'"]
|
||||||
|
secretstorage = [
|
||||||
|
"cffi",
|
||||||
|
"secretstorage",
|
||||||
|
]
|
||||||
|
build = [
|
||||||
|
"build",
|
||||||
|
"hatchling",
|
||||||
|
"pip",
|
||||||
|
"setuptools",
|
||||||
|
"wheel",
|
||||||
|
]
|
||||||
|
dev = [
|
||||||
|
"pre-commit",
|
||||||
|
"yt-dlp[static-analysis]",
|
||||||
|
"yt-dlp[test]",
|
||||||
|
]
|
||||||
|
static-analysis = [
|
||||||
|
"autopep8~=2.0",
|
||||||
|
"ruff~=0.4.4",
|
||||||
|
]
|
||||||
|
test = [
|
||||||
|
"pytest~=8.1",
|
||||||
|
]
|
||||||
|
pyinstaller = [
|
||||||
|
"pyinstaller>=6.7.0", # for compat with setuptools>=70
|
||||||
|
]
|
||||||
|
py2exe = [
|
||||||
|
"py2exe>=0.12",
|
||||||
|
]
|
||||||
|
|
||||||
|
[project.urls]
|
||||||
|
Documentation = "https://github.com/yt-dlp/yt-dlp#readme"
|
||||||
|
Repository = "https://github.com/yt-dlp/yt-dlp"
|
||||||
|
Tracker = "https://github.com/yt-dlp/yt-dlp/issues"
|
||||||
|
Funding = "https://github.com/yt-dlp/yt-dlp/blob/master/Collaborators.md#collaborators"
|
||||||
|
|
||||||
|
[project.scripts]
|
||||||
|
yt-dlp = "yt_dlp:main"
|
||||||
|
|
||||||
|
[project.entry-points.pyinstaller40]
|
||||||
|
hook-dirs = "yt_dlp.__pyinstaller:get_hook_dirs"
|
||||||
|
|
||||||
|
[tool.hatch.build.targets.sdist]
|
||||||
|
include = [
|
||||||
|
"/yt_dlp",
|
||||||
|
"/devscripts",
|
||||||
|
"/test",
|
||||||
|
"/.gitignore", # included by default, needed for auto-excludes
|
||||||
|
"/Changelog.md",
|
||||||
|
"/LICENSE", # included as license
|
||||||
|
"/pyproject.toml", # included by default
|
||||||
|
"/README.md", # included as readme
|
||||||
|
"/setup.cfg",
|
||||||
|
"/supportedsites.md",
|
||||||
|
]
|
||||||
|
artifacts = [
|
||||||
|
"/yt_dlp/extractor/lazy_extractors.py",
|
||||||
|
"/completions",
|
||||||
|
"/AUTHORS", # included by default
|
||||||
|
"/README.txt",
|
||||||
|
"/yt-dlp.1",
|
||||||
|
]
|
||||||
|
|
||||||
|
[tool.hatch.build.targets.wheel]
|
||||||
|
packages = ["yt_dlp"]
|
||||||
|
artifacts = ["/yt_dlp/extractor/lazy_extractors.py"]
|
||||||
|
|
||||||
|
[tool.hatch.build.targets.wheel.shared-data]
|
||||||
|
"completions/bash/yt-dlp" = "share/bash-completion/completions/yt-dlp"
|
||||||
|
"completions/zsh/_yt-dlp" = "share/zsh/site-functions/_yt-dlp"
|
||||||
|
"completions/fish/yt-dlp.fish" = "share/fish/vendor_completions.d/yt-dlp.fish"
|
||||||
|
"README.txt" = "share/doc/yt_dlp/README.txt"
|
||||||
|
"yt-dlp.1" = "share/man/man1/yt-dlp.1"
|
||||||
|
|
||||||
|
[tool.hatch.version]
|
||||||
|
path = "yt_dlp/version.py"
|
||||||
|
pattern = "_pkg_version = '(?P<version>[^']+)'"
|
||||||
|
|
||||||
|
[tool.hatch.envs.default]
|
||||||
|
features = ["curl-cffi", "default"]
|
||||||
|
dependencies = ["pre-commit"]
|
||||||
|
path = ".venv"
|
||||||
|
installer = "uv"
|
||||||
|
|
||||||
|
[tool.hatch.envs.default.scripts]
|
||||||
|
setup = "pre-commit install --config .pre-commit-hatch.yaml"
|
||||||
|
yt-dlp = "python -Werror -Xdev -m yt_dlp {args}"
|
||||||
|
|
||||||
|
[tool.hatch.envs.hatch-static-analysis]
|
||||||
|
detached = true
|
||||||
|
features = ["static-analysis"]
|
||||||
|
dependencies = [] # override hatch ruff version
|
||||||
|
config-path = "pyproject.toml"
|
||||||
|
|
||||||
|
[tool.hatch.envs.hatch-static-analysis.scripts]
|
||||||
|
format-check = "autopep8 --diff {args:.}"
|
||||||
|
format-fix = "autopep8 --in-place {args:.}"
|
||||||
|
lint-check = "ruff check {args:.}"
|
||||||
|
lint-fix = "ruff check --fix {args:.}"
|
||||||
|
|
||||||
|
[tool.hatch.envs.hatch-test]
features = ["test"]
dependencies = [
    "pytest-randomly~=3.15",
    "pytest-rerunfailures~=14.0",
    "pytest-xdist[psutil]~=3.5",
]

[tool.hatch.envs.hatch-test.scripts]
run = "python -m devscripts.run_tests {args}"
run-cov = "echo Code coverage not implemented && exit 1"

[[tool.hatch.envs.hatch-test.matrix]]
python = [
    "3.8",
    "3.9",
    "3.10",
    "3.11",
    "3.12",
    "pypy3.8",
    "pypy3.9",
    "pypy3.10",
]

[tool.ruff]
line-length = 120

[tool.ruff.lint]
ignore = [
    "E402",  # module-import-not-at-top-of-file
    "E501",  # line-too-long
    "E731",  # lambda-assignment
    "E741",  # ambiguous-variable-name
    "UP036",  # outdated-version-block
    "B006",  # mutable-argument-default
    "B008",  # function-call-in-default-argument
    "B011",  # assert-false
    "B017",  # assert-raises-exception
    "B023",  # function-uses-loop-variable (false positives)
    "B028",  # no-explicit-stacklevel
    "B904",  # raise-without-from-inside-except
    "C401",  # unnecessary-generator-set
    "C402",  # unnecessary-generator-dict
    "PIE790",  # unnecessary-placeholder
    "SIM102",  # collapsible-if
    "SIM108",  # if-else-block-instead-of-if-exp
    "SIM112",  # uncapitalized-environment-variables
    "SIM113",  # enumerate-for-loop
    "SIM114",  # if-with-same-arms
    "SIM115",  # open-file-with-context-handler
    "SIM117",  # multiple-with-statements
    "SIM223",  # expr-and-false
    "SIM300",  # yoda-conditions
    "TD001",  # invalid-todo-tag
    "TD002",  # missing-todo-author
    "TD003",  # missing-todo-link
    "PLE0604",  # invalid-all-object (false positives)
    "PLW0603",  # global-statement
    "PLW1510",  # subprocess-run-without-check
    "PLW2901",  # redefined-loop-name
    "RUF001",  # ambiguous-unicode-character-string
    "RUF012",  # mutable-class-default
    "RUF100",  # unused-noqa (flake8 has slightly different behavior)
]
select = [
    "E",  # pycodestyle Error
    "W",  # pycodestyle Warning
    "F",  # Pyflakes
    "I",  # isort
    "Q",  # flake8-quotes
    "N803",  # invalid-argument-name
    "N804",  # invalid-first-argument-name-for-class-method
    "UP",  # pyupgrade
    "B",  # flake8-bugbear
    "A",  # flake8-builtins
    "COM",  # flake8-commas
    "C4",  # flake8-comprehensions
    "FA",  # flake8-future-annotations
    "ISC",  # flake8-implicit-str-concat
    "ICN003",  # banned-import-from
    "PIE",  # flake8-pie
    "T20",  # flake8-print
    "RSE",  # flake8-raise
    "RET504",  # unnecessary-assign
    "SIM",  # flake8-simplify
    "TID251",  # banned-api
    "TD",  # flake8-todos
    "PLC",  # Pylint Convention
    "PLE",  # Pylint Error
    "PLW",  # Pylint Warning
    "RUF",  # Ruff-specific rules
]

[tool.ruff.lint.per-file-ignores]
"devscripts/lazy_load_template.py" = [
    "F401",  # unused-import
]
"!yt_dlp/extractor/**.py" = [
    "I",  # isort
    "ICN003",  # banned-import-from
    "T20",  # flake8-print
    "A002",  # builtin-argument-shadowing
    "C408",  # unnecessary-collection-call
]
"yt_dlp/jsinterp.py" = [
    "UP031",  # printf-string-formatting
]

[tool.ruff.lint.isort]
known-first-party = [
    "bundle",
    "devscripts",
    "test",
]
relative-imports-order = "closest-to-furthest"

[tool.ruff.lint.flake8-quotes]
docstring-quotes = "double"
multiline-quotes = "single"
inline-quotes = "single"
avoid-escape = false

[tool.ruff.lint.pep8-naming]
classmethod-decorators = [
    "yt_dlp.utils.classproperty",
]

[tool.ruff.lint.flake8-import-conventions]
banned-from = [
    "base64",
    "datetime",
    "functools",
    "glob",
    "hashlib",
    "itertools",
    "json",
    "math",
    "os",
    "pathlib",
    "random",
    "re",
    "string",
    "sys",
    "time",
    "urllib",
    "uuid",
    "xml",
]

[tool.ruff.lint.flake8-tidy-imports.banned-api]
"yt_dlp.compat.compat_str".msg = "Use `str` instead."
"yt_dlp.compat.compat_b64decode".msg = "Use `base64.b64decode` instead."
"yt_dlp.compat.compat_urlparse".msg = "Use `urllib.parse` instead."
"yt_dlp.compat.compat_parse_qs".msg = "Use `urllib.parse.parse_qs` instead."
"yt_dlp.compat.compat_urllib_parse_unquote".msg = "Use `urllib.parse.unquote` instead."
"yt_dlp.compat.compat_urllib_parse_urlencode".msg = "Use `urllib.parse.urlencode` instead."
"yt_dlp.compat.compat_urllib_parse_urlparse".msg = "Use `urllib.parse.urlparse` instead."
"yt_dlp.compat.compat_shlex_quote".msg = "Use `yt_dlp.utils.shell_quote` instead."
"yt_dlp.utils.error_to_compat_str".msg = "Use `str` instead."

[tool.autopep8]
max_line_length = 120
recursive = true
exit-code = true
jobs = 0
select = [
    "E101",
    "E112",
    "E113",
    "E115",
    "E116",
    "E117",
    "E121",
    "E122",
    "E123",
    "E124",
    "E125",
    "E126",
    "E127",
    "E128",
    "E129",
    "E131",
    "E201",
    "E202",
    "E203",
    "E211",
    "E221",
    "E222",
    "E223",
    "E224",
    "E225",
    "E226",
    "E227",
    "E228",
    "E231",
    "E241",
    "E242",
    "E251",
    "E252",
    "E261",
    "E262",
    "E265",
    "E266",
    "E271",
    "E272",
    "E273",
    "E274",
    "E275",
    "E301",
    "E302",
    "E303",
    "E304",
    "E305",
    "E306",
    "E502",
    "E701",
    "E702",
    "E704",
    "W391",
    "W504",
]

[tool.pytest.ini_options]
addopts = "-ra -v --strict-markers"
markers = [
    "download",
]
@ -1,6 +0,0 @@
|
|||||||
mutagen
|
|
||||||
pycryptodomex
|
|
||||||
websockets
|
|
||||||
brotli; platform_python_implementation=='CPython'
|
|
||||||
brotlicffi; platform_python_implementation!='CPython'
|
|
||||||
certifi
|
|
@ -1,175 +0,0 @@
|
|||||||
#!/usr/bin/env python3
|
|
||||||
|
|
||||||
# Allow execution from anywhere
|
|
||||||
import os
|
|
||||||
import sys
|
|
||||||
|
|
||||||
sys.path.insert(0, os.path.dirname(os.path.abspath(__file__)))
|
|
||||||
|
|
||||||
import subprocess
|
|
||||||
import warnings
|
|
||||||
|
|
||||||
try:
    # Prefer setuptools; fall back to distutils on minimal environments.
    # setuptools_available gates features like find_packages/entry_points.
    from setuptools import Command, find_packages, setup
    setuptools_available = True
except ImportError:
    from distutils.core import Command, setup
    setuptools_available = False

from devscripts.utils import read_file, read_version

# Version string is produced by the devscripts helper, not hard-coded here
VERSION = read_version()

DESCRIPTION = 'A youtube-dl fork with additional features and patches'

# PyPI long description: repo pointer + a disclaimer + the full README
LONG_DESCRIPTION = '\n\n'.join((
    'Official repository: <https://github.com/yt-dlp/yt-dlp>',
    '**PS**: Some links in this document will not work since this is a copy of the README.md from Github',
    read_file('README.md')))

# install_requires is sourced from requirements.txt, one requirement per line
REQUIREMENTS = read_file('requirements.txt').splitlines()
|
|
||||||
def packages():
    """Return the list of packages to include in the distribution.

    With setuptools, the package list is discovered automatically
    (excluding tests, plugins and build tooling); under bare distutils
    a static fallback list is used instead.
    """
    if not setuptools_available:
        return [
            'yt_dlp', 'yt_dlp.extractor', 'yt_dlp.downloader', 'yt_dlp.postprocessor', 'yt_dlp.compat',
        ]
    return find_packages(exclude=('youtube_dl', 'youtube_dlc', 'test', 'ytdlp_plugins', 'devscripts'))
|
|
||||||
def py2exe_params():
    """Build and return the keyword arguments for a py2exe build.

    A warning is always emitted because py2exe builds are limited
    (no pycryptodomex support, VC++14 runtime required); pyinstaller
    is the recommended builder.
    """
    warnings.warn(
        'py2exe builds do not support pycryptodomex and needs VC++14 to run. '
        'It is recommended to run "pyinst.py" to build using pyinstaller instead')

    console_target = {
        'script': './yt_dlp/__main__.py',
        'dest_base': 'yt-dlp',
        'icon_resources': [(1, 'devscripts/logo.ico')],
    }
    version_info = {
        'version': VERSION,
        'description': DESCRIPTION,
        # First line only; the full text is too long for version metadata
        'comments': LONG_DESCRIPTION.split('\n')[0],
        'product_name': 'yt-dlp',
        'product_version': VERSION,
    }
    build_options = {
        'bundle_files': 0,
        'compressed': 1,
        'optimize': 2,
        'dist_dir': './dist',
        'excludes': ['Crypto', 'Cryptodome'],  # py2exe cannot import Crypto
        'dll_excludes': ['w9xpopen.exe', 'crypt32.dll'],
        # Modules that are only imported dynamically must be added here
        'includes': ['yt_dlp.compat._legacy'],
    }

    return {
        'console': [console_target],
        'version_info': version_info,
        'options': build_options,
        'zipfile': None,
    }
|
||||||
|
|
||||||
def build_params():
    """Return setup() keyword arguments for a non-py2exe build.

    Collects completion scripts, docs and the man page into data_files
    (skipping any that have not been generated yet) and wires up entry
    points (setuptools) or a plain script (distutils).
    """
    files_spec = [
        ('share/bash-completion/completions', ['completions/bash/yt-dlp']),
        ('share/zsh/site-functions', ['completions/zsh/_yt-dlp']),
        ('share/fish/vendor_completions.d', ['completions/fish/yt-dlp.fish']),
        ('share/doc/yt_dlp', ['README.txt']),
        ('share/man/man1', ['yt-dlp.1']),
    ]
    data_files = []
    for dirname, files in files_spec:
        present = []
        for fn in files:
            if os.path.exists(fn):
                present.append(fn)
            else:
                # These files are generated; absence just means the
                # pypi-files make target was not run beforehand
                warnings.warn(f'Skipping file {fn} since it is not present. Try running " make pypi-files " first')
        data_files.append((dirname, present))

    params = {'data_files': data_files}

    if not setuptools_available:
        params['scripts'] = ['yt-dlp']
    else:
        params['entry_points'] = {
            'console_scripts': ['yt-dlp = yt_dlp:main'],
            'pyinstaller40': ['hook-dirs = yt_dlp.__pyinstaller:get_hook_dirs'],
        }
    return params
|
||||||
|
|
||||||
class build_lazy_extractors(Command):
    """Custom setup command that generates the lazy extractor module."""

    description = 'Build the extractor lazy loading module'
    user_options = []

    def initialize_options(self):
        """No options to initialize."""

    def finalize_options(self):
        """No options to finalize."""

    def run(self):
        # Generating the module writes a file, so honor --dry-run
        if self.dry_run:
            print('Skipping build of lazy extractors in dry run mode')
            return
        subprocess.run([sys.executable, 'devscripts/make_lazy_extractors.py'])
||||||
|
|
||||||
def main():
    """Entry point: dispatch to py2exe freeze or a regular setup() run."""
    if sys.argv[1:2] == ['py2exe']:
        params = py2exe_params()
        try:
            from py2exe import freeze
        except ImportError:
            # Legacy py2exe API: massage the params into the old layout
            # and fall through to setup() below
            import py2exe  # noqa: F401
            warnings.warn('You are using an outdated version of py2exe. Support for this version will be removed in the future')
            params['console'][0].update(params.pop('version_info'))
            params['options'] = {'py2exe': params.pop('options')}
        else:
            # Modern py2exe builds entirely through freeze(); setup() is skipped
            return freeze(**params)
    else:
        params = build_params()

    setup(
        name='yt-dlp',
        version=VERSION,
        maintainer='pukkandan',
        maintainer_email='pukkandan.ytdlp@gmail.com',
        description=DESCRIPTION,
        long_description=LONG_DESCRIPTION,
        long_description_content_type='text/markdown',
        url='https://github.com/yt-dlp/yt-dlp',
        packages=packages(),
        install_requires=REQUIREMENTS,
        python_requires='>=3.7',
        project_urls={
            'Documentation': 'https://github.com/yt-dlp/yt-dlp#readme',
            'Source': 'https://github.com/yt-dlp/yt-dlp',
            'Tracker': 'https://github.com/yt-dlp/yt-dlp/issues',
            'Funding': 'https://github.com/yt-dlp/yt-dlp/blob/master/Collaborators.md#collaborators',
        },
        classifiers=[
            'Topic :: Multimedia :: Video',
            'Development Status :: 5 - Production/Stable',
            'Environment :: Console',
            'Programming Language :: Python',
            'Programming Language :: Python :: 3.7',
            'Programming Language :: Python :: 3.8',
            'Programming Language :: Python :: 3.9',
            'Programming Language :: Python :: 3.10',
            'Programming Language :: Python :: 3.11',
            'Programming Language :: Python :: Implementation',
            'Programming Language :: Python :: Implementation :: CPython',
            'Programming Language :: Python :: Implementation :: PyPy',
            'License :: Public Domain',
            'Operating System :: OS Independent',
        ],
        cmdclass={'build_lazy_extractors': build_lazy_extractors},
        **params,
    )


main()
|
|
File diff suppressed because it is too large
Load Diff
@ -0,0 +1,64 @@
|
|||||||
|
import inspect
|
||||||
|
|
||||||
|
import pytest
|
||||||
|
|
||||||
|
from yt_dlp.networking import RequestHandler
|
||||||
|
from yt_dlp.networking.common import _REQUEST_HANDLERS
|
||||||
|
from yt_dlp.utils._utils import _YDLLogger as FakeLogger
|
||||||
|
|
||||||
|
|
||||||
|
@pytest.fixture
def handler(request):
    """Resolve the parametrized request-handler key into a test-ready class.

    Accepts either a RequestHandler subclass or a registered RH_KEY string;
    skips the test when the requested handler is unavailable. Returns None
    when the test was not parametrized with a handler at all.
    """
    key = getattr(request, 'param', None)
    if not key:
        return

    if inspect.isclass(key) and issubclass(key, RequestHandler):
        rh_cls = key
    elif key in _REQUEST_HANDLERS:
        rh_cls = _REQUEST_HANDLERS[key]
    else:
        pytest.skip(f'{key} request handler is not available')

    class HandlerWrapper(rh_cls):
        # Preserve the key so tests can match against handler.RH_KEY
        RH_KEY = rh_cls.RH_KEY

        def __init__(self, **kwargs):
            # Inject a silent logger so handlers don't need one per test
            super().__init__(logger=FakeLogger, **kwargs)

    return HandlerWrapper
|
||||||
|
|
||||||
|
@pytest.fixture(autouse=True)
def skip_handler(request, handler):
    """usage: pytest.mark.skip_handler('my_handler', 'reason')

    Skips the test when it is marked for the currently parametrized handler.
    """
    for marker in request.node.iter_markers('skip_handler'):
        # Guard against handler being None (test not parametrized with a
        # handler) — mirrors the check in skip_handlers_if and avoids an
        # AttributeError on handler.RH_KEY
        if handler and marker.args[0] == handler.RH_KEY:
            pytest.skip(marker.args[1] if len(marker.args) > 1 else '')
|
||||||
|
|
||||||
|
@pytest.fixture(autouse=True)
def skip_handler_if(request, handler):
    """usage: pytest.mark.skip_handler_if('my_handler', lambda request: True, 'reason')

    Skips the test when it is marked for the current handler and the
    supplied predicate evaluates to true for this request.
    """
    for marker in request.node.iter_markers('skip_handler_if'):
        # Guard against handler being None (test not parametrized with a
        # handler) — mirrors the check in skip_handlers_if and avoids an
        # AttributeError on handler.RH_KEY
        if handler and marker.args[0] == handler.RH_KEY and marker.args[1](request):
            pytest.skip(marker.args[2] if len(marker.args) > 2 else '')
|
||||||
|
|
||||||
|
@pytest.fixture(autouse=True)
def skip_handlers_if(request, handler):
    """usage: pytest.mark.skip_handlers_if(lambda request, handler: True, 'reason')

    Skips the test when the supplied predicate is true for this
    request/handler pair; a missing handler never triggers a skip.
    """
    if not handler:
        return
    for marker in request.node.iter_markers('skip_handlers_if'):
        predicate = marker.args[0]
        if predicate(request, handler):
            pytest.skip(marker.args[1] if len(marker.args) > 1 else '')
|
||||||
|
|
||||||
|
def pytest_configure(config):
    """Register the custom skip markers so --strict-markers accepts them."""
    marker_docs = (
        'skip_handler(handler): skip test for the given handler',
        'skip_handler_if(handler): skip test for the given handler if condition is true',
        'skip_handlers_if(handler): skip test for handlers when the condition is true',
    )
    for doc in marker_docs:
        config.addinivalue_line('markers', doc)
|
@ -0,0 +1,139 @@
|
|||||||
|
#!/usr/bin/env python3
|
||||||
|
|
||||||
|
# Allow direct execution
|
||||||
|
import os
|
||||||
|
import sys
|
||||||
|
import unittest
|
||||||
|
|
||||||
|
sys.path.insert(0, os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
|
||||||
|
|
||||||
|
import http.cookiejar
|
||||||
|
|
||||||
|
from test.helper import FakeYDL
|
||||||
|
from yt_dlp.downloader.external import (
|
||||||
|
Aria2cFD,
|
||||||
|
AxelFD,
|
||||||
|
CurlFD,
|
||||||
|
FFmpegFD,
|
||||||
|
HttpieFD,
|
||||||
|
WgetFD,
|
||||||
|
)
|
||||||
|
|
||||||
|
# Keyword arguments for http.cookiejar.Cookie covering a plain session
# cookie for .example.com; used to verify each downloader forwards cookies
TEST_COOKIE = {
    'version': 0,
    'name': 'test',
    'value': 'ytdlp',
    'port': None,
    'port_specified': False,
    'domain': '.example.com',
    'domain_specified': True,
    'domain_initial_dot': False,
    'path': '/',
    'path_specified': True,
    'secure': False,
    'expires': None,
    'discard': False,
    'comment': None,
    'comment_url': None,
    'rest': {},
}

# Minimal info dict accepted by the external downloaders' _make_cmd
TEST_INFO = {'url': 'http://www.example.com/'}
||||||
|
|
||||||
|
class TestHttpieFD(unittest.TestCase):
    def test_make_cmd(self):
        with FakeYDL() as ydl:
            downloader = HttpieFD(ydl, {})
            base_cmd = ['http', '--download', '--output', 'test', 'http://www.example.com/']
            self.assertEqual(downloader._make_cmd('test', TEST_INFO), base_cmd)

            # Test cookie header is added
            ydl.cookiejar.set_cookie(http.cookiejar.Cookie(**TEST_COOKIE))
            self.assertEqual(
                downloader._make_cmd('test', TEST_INFO),
                [*base_cmd, 'Cookie:test=ytdlp'])
|
||||||
|
class TestAxelFD(unittest.TestCase):
    def test_make_cmd(self):
        with FakeYDL() as ydl:
            downloader = AxelFD(ydl, {})
            url = TEST_INFO['url']
            self.assertEqual(
                downloader._make_cmd('test', TEST_INFO),
                ['axel', '-o', 'test', '--', url])

            # Test cookie header is added
            ydl.cookiejar.set_cookie(http.cookiejar.Cookie(**TEST_COOKIE))
            self.assertEqual(
                downloader._make_cmd('test', TEST_INFO),
                ['axel', '-o', 'test', '-H', 'Cookie: test=ytdlp', '--max-redirect=0', '--', url])
|
||||||
|
class TestWgetFD(unittest.TestCase):
    def test_make_cmd(self):
        with FakeYDL() as ydl:
            downloader = WgetFD(ydl, {})
            cmd_without_cookies = downloader._make_cmd('test', TEST_INFO)
            self.assertNotIn('--load-cookies', cmd_without_cookies)

            # Test cookiejar tempfile arg is added
            ydl.cookiejar.set_cookie(http.cookiejar.Cookie(**TEST_COOKIE))
            cmd_with_cookies = downloader._make_cmd('test', TEST_INFO)
            self.assertIn('--load-cookies', cmd_with_cookies)
|
||||||
|
class TestCurlFD(unittest.TestCase):
    def test_make_cmd(self):
        with FakeYDL() as ydl:
            downloader = CurlFD(ydl, {})
            self.assertNotIn('--cookie', downloader._make_cmd('test', TEST_INFO))

            # Test cookie header is added
            ydl.cookiejar.set_cookie(http.cookiejar.Cookie(**TEST_COOKIE))
            cmd_with_cookies = downloader._make_cmd('test', TEST_INFO)
            self.assertIn('--cookie', cmd_with_cookies)
            self.assertIn('test=ytdlp', cmd_with_cookies)
|
||||||
|
class TestAria2cFD(unittest.TestCase):
    def test_make_cmd(self):
        with FakeYDL() as ydl:
            downloader = Aria2cFD(ydl, {})
            downloader._make_cmd('test', TEST_INFO)
            # Without cookies no tempfile should have been created
            self.assertFalse(hasattr(downloader, '_cookies_tempfile'))

            # Test cookiejar tempfile arg is added
            ydl.cookiejar.set_cookie(http.cookiejar.Cookie(**TEST_COOKIE))
            cmd = downloader._make_cmd('test', TEST_INFO)
            self.assertIn(f'--load-cookies={downloader._cookies_tempfile}', cmd)
|
||||||
|
@unittest.skipUnless(FFmpegFD.available(), 'ffmpeg not found')
class TestFFmpegFD(unittest.TestCase):
    _args = []

    def _test_cmd(self, args):
        # Capture the command line instead of logging it
        self._args = args

    def test_make_cmd(self):
        with FakeYDL() as ydl:
            downloader = FFmpegFD(ydl, {})
            downloader._debug_cmd = self._test_cmd
            info = {**TEST_INFO, 'ext': 'mp4'}

            downloader._call_downloader('test', info)
            self.assertEqual(self._args, [
                'ffmpeg', '-y', '-hide_banner', '-i', 'http://www.example.com/',
                '-c', 'copy', '-f', 'mp4', 'file:test'])

            # Test cookies arg is added
            ydl.cookiejar.set_cookie(http.cookiejar.Cookie(**TEST_COOKIE))
            downloader._call_downloader('test', info)
            self.assertEqual(self._args, [
                'ffmpeg', '-y', '-hide_banner', '-cookies', 'test=ytdlp; path=/; domain=.example.com;\r\n',
                '-i', 'http://www.example.com/', '-c', 'copy', '-f', 'mp4', 'file:test'])

            # Test with non-url input (ffmpeg reads from stdin '-' for websockets)
            downloader._call_downloader('test', {'url': 'x', 'ext': 'mp4'})
            self.assertEqual(self._args, [
                'ffmpeg', '-y', '-hide_banner', '-i', 'x', '-c', 'copy', '-f', 'mp4', 'file:test'])
|
||||||
|
if __name__ == '__main__':
|
||||||
|
unittest.main()
|
@ -1,500 +0,0 @@
|
|||||||
#!/usr/bin/env python3
|
|
||||||
|
|
||||||
# Allow direct execution
|
|
||||||
import os
|
|
||||||
import sys
|
|
||||||
import unittest
|
|
||||||
|
|
||||||
sys.path.insert(0, os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
|
|
||||||
|
|
||||||
import gzip
|
|
||||||
import http.cookiejar
|
|
||||||
import http.server
|
|
||||||
import io
|
|
||||||
import pathlib
|
|
||||||
import ssl
|
|
||||||
import tempfile
|
|
||||||
import threading
|
|
||||||
import urllib.error
|
|
||||||
import urllib.request
|
|
||||||
import zlib
|
|
||||||
|
|
||||||
from test.helper import http_server_port
|
|
||||||
from yt_dlp import YoutubeDL
|
|
||||||
from yt_dlp.dependencies import brotli
|
|
||||||
from yt_dlp.utils import sanitized_Request, urlencode_postdata
|
|
||||||
|
|
||||||
from .helper import FakeYDL
|
|
||||||
|
|
||||||
TEST_DIR = os.path.dirname(os.path.abspath(__file__))
|
|
||||||
|
|
||||||
|
|
||||||
class HTTPTestRequestHandler(http.server.BaseHTTPRequestHandler):
    """Test HTTP server: echoes headers/methods, exercises redirects,
    content encodings and non-ASCII paths/headers."""

    protocol_version = 'HTTP/1.1'

    def log_message(self, format, *args):
        pass  # keep test output quiet

    def _send_payload(self, payload, content_type='text/html; charset=utf-8'):
        """Send a 200 response with an explicit Content-Length
        (required for persistent connections)."""
        self.send_response(200)
        self.send_header('Content-Type', content_type)
        self.send_header('Content-Length', str(len(payload)))
        self.end_headers()
        self.wfile.write(payload)

    def _headers(self):
        # Echo the raw request headers back as the response body
        payload = str(self.headers).encode('utf-8')
        self._send_payload(payload, content_type='application/json')

    def _redirect(self):
        # Path is e.g. /redirect_302 — the numeric suffix selects the status
        self.send_response(int(self.path[len('/redirect_'):]))
        self.send_header('Location', '/method')
        self.send_header('Content-Length', '0')
        self.end_headers()

    def _method(self, method, payload=None):
        # Report which HTTP method reached the server via a response header
        self.send_response(200)
        self.send_header('Content-Length', str(len(payload or '')))
        self.send_header('Method', method)
        self.end_headers()
        if payload:
            self.wfile.write(payload)

    def _status(self, status):
        # Error response with a small HTML body
        payload = f'<html>{status} NOT FOUND</html>'.encode()
        self.send_response(int(status))
        self.send_header('Content-Type', 'text/html; charset=utf-8')
        self.send_header('Content-Length', str(len(payload)))
        self.end_headers()
        self.wfile.write(payload)

    def _read_data(self):
        # Returns None when the request carries no body
        if 'Content-Length' in self.headers:
            return self.rfile.read(int(self.headers['Content-Length']))

    def do_POST(self):
        data = self._read_data()
        if self.path.startswith('/redirect_'):
            self._redirect()
        elif self.path.startswith('/method'):
            self._method('POST', data)
        elif self.path.startswith('/headers'):
            self._headers()
        else:
            self._status(404)

    def do_HEAD(self):
        if self.path.startswith('/redirect_'):
            self._redirect()
        elif self.path.startswith('/method'):
            self._method('HEAD')
        else:
            self._status(404)

    def do_PUT(self):
        data = self._read_data()
        if self.path.startswith('/redirect_'):
            self._redirect()
        elif self.path.startswith('/method'):
            self._method('PUT', data)
        else:
            self._status(404)

    def do_GET(self):
        if self.path == '/video.html':
            self._send_payload(b'<html><video src="/vid.mp4" /></html>')
        elif self.path == '/vid.mp4':
            self._send_payload(b'\x00\x00\x00\x00\x20\x66\x74[video]', content_type='video/mp4')
        elif self.path == '/%E4%B8%AD%E6%96%87.html':
            # Percent-encoded Unicode path
            self._send_payload(b'<html><video src="/vid.mp4" /></html>')
        elif self.path == '/%c7%9f':
            # Pre-existing (lowercase) percent encoding must not be normalized
            self._send_payload(b'<html><video src="/vid.mp4" /></html>')
        elif self.path.startswith('/redirect_'):
            self._redirect()
        elif self.path.startswith('/method'):
            self._method('GET')
        elif self.path.startswith('/headers'):
            self._headers()
        elif self.path == '/trailing_garbage':
            # gzip body followed by junk bytes; clients must tolerate this
            payload = b'<html><video src="/vid.mp4" /></html>'
            self.send_response(200)
            self.send_header('Content-Type', 'text/html; charset=utf-8')
            self.send_header('Content-Encoding', 'gzip')
            buf = io.BytesIO()
            with gzip.GzipFile(fileobj=buf, mode='wb') as f:
                f.write(payload)
            compressed = buf.getvalue() + b'trailing garbage'
            self.send_header('Content-Length', str(len(compressed)))
            self.end_headers()
            self.wfile.write(compressed)
        elif self.path == '/302-non-ascii-redirect':
            # Location header containing raw non-ASCII characters
            new_url = f'http://127.0.0.1:{http_server_port(self.server)}/中文.html'
            self.send_response(301)
            self.send_header('Location', new_url)
            self.send_header('Content-Length', '0')
            self.end_headers()
        elif self.path == '/content-encoding':
            # Apply the encodings the client requested via 'ytdl-encoding',
            # in order, so stacked encodings can be tested
            encodings = self.headers.get('ytdl-encoding', '')
            payload = b'<html><video src="/vid.mp4" /></html>'
            for encoding in filter(None, (e.strip() for e in encodings.split(','))):
                if encoding == 'br' and brotli:
                    payload = brotli.compress(payload)
                elif encoding == 'gzip':
                    buf = io.BytesIO()
                    with gzip.GzipFile(fileobj=buf, mode='wb') as f:
                        f.write(payload)
                    payload = buf.getvalue()
                elif encoding == 'deflate':
                    payload = zlib.compress(payload)
                elif encoding == 'unsupported':
                    payload = b'raw'
                    break
                else:
                    self._status(415)
                    return
            self.send_response(200)
            self.send_header('Content-Encoding', encodings)
            self.send_header('Content-Length', str(len(payload)))
            self.end_headers()
            self.wfile.write(payload)
        else:
            self._status(404)

    def send_header(self, keyword, value):
        """
        Forcibly allow HTTP server to send non percent-encoded non-ASCII characters in headers.
        This is against what is defined in RFC 3986, however we need to test we support this
        since some sites incorrectly do this.
        """
        if keyword.lower() == 'connection':
            return super().send_header(keyword, value)

        if not hasattr(self, '_headers_buffer'):
            self._headers_buffer = []

        self._headers_buffer.append(f'{keyword}: {value}\r\n'.encode())
||||||
|
|
||||||
class FakeLogger:
    """No-op logger: accepts debug/warning/error messages and discards them."""

    def debug(self, msg):
        pass  # intentionally silent

    def warning(self, msg):
        pass  # intentionally silent

    def error(self, msg):
        pass  # intentionally silent
||||||
|
|
||||||
class TestHTTP(unittest.TestCase):
|
|
||||||
def setUp(self):
|
|
||||||
# HTTP server
|
|
||||||
self.http_httpd = http.server.ThreadingHTTPServer(
|
|
||||||
('127.0.0.1', 0), HTTPTestRequestHandler)
|
|
||||||
self.http_port = http_server_port(self.http_httpd)
|
|
||||||
self.http_server_thread = threading.Thread(target=self.http_httpd.serve_forever)
|
|
||||||
# FIXME: we should probably stop the http server thread after each test
|
|
||||||
# See: https://github.com/yt-dlp/yt-dlp/pull/7094#discussion_r1199746041
|
|
||||||
self.http_server_thread.daemon = True
|
|
||||||
self.http_server_thread.start()
|
|
||||||
|
|
||||||
# HTTPS server
|
|
||||||
certfn = os.path.join(TEST_DIR, 'testcert.pem')
|
|
||||||
self.https_httpd = http.server.ThreadingHTTPServer(
|
|
||||||
('127.0.0.1', 0), HTTPTestRequestHandler)
|
|
||||||
sslctx = ssl.SSLContext(ssl.PROTOCOL_TLS_SERVER)
|
|
||||||
sslctx.load_cert_chain(certfn, None)
|
|
||||||
self.https_httpd.socket = sslctx.wrap_socket(self.https_httpd.socket, server_side=True)
|
|
||||||
self.https_port = http_server_port(self.https_httpd)
|
|
||||||
self.https_server_thread = threading.Thread(target=self.https_httpd.serve_forever)
|
|
||||||
self.https_server_thread.daemon = True
|
|
||||||
self.https_server_thread.start()
|
|
||||||
|
|
||||||
def test_nocheckcertificate(self):
|
|
||||||
with FakeYDL({'logger': FakeLogger()}) as ydl:
|
|
||||||
with self.assertRaises(urllib.error.URLError):
|
|
||||||
ydl.urlopen(sanitized_Request(f'https://127.0.0.1:{self.https_port}/headers'))
|
|
||||||
|
|
||||||
with FakeYDL({'logger': FakeLogger(), 'nocheckcertificate': True}) as ydl:
|
|
||||||
r = ydl.urlopen(sanitized_Request(f'https://127.0.0.1:{self.https_port}/headers'))
|
|
||||||
self.assertEqual(r.status, 200)
|
|
||||||
r.close()
|
|
||||||
|
|
||||||
def test_percent_encode(self):
|
|
||||||
with FakeYDL() as ydl:
|
|
||||||
# Unicode characters should be encoded with uppercase percent-encoding
|
|
||||||
res = ydl.urlopen(sanitized_Request(f'http://127.0.0.1:{self.http_port}/中文.html'))
|
|
||||||
self.assertEqual(res.status, 200)
|
|
||||||
res.close()
|
|
||||||
# don't normalize existing percent encodings
|
|
||||||
res = ydl.urlopen(sanitized_Request(f'http://127.0.0.1:{self.http_port}/%c7%9f'))
|
|
||||||
self.assertEqual(res.status, 200)
|
|
||||||
res.close()
|
|
||||||
|
|
||||||
def test_unicode_path_redirection(self):
|
|
||||||
with FakeYDL() as ydl:
|
|
||||||
r = ydl.urlopen(sanitized_Request(f'http://127.0.0.1:{self.http_port}/302-non-ascii-redirect'))
|
|
||||||
self.assertEqual(r.url, f'http://127.0.0.1:{self.http_port}/%E4%B8%AD%E6%96%87.html')
|
|
||||||
r.close()
|
|
||||||
|
|
||||||
def test_redirect(self):
|
|
||||||
with FakeYDL() as ydl:
|
|
||||||
def do_req(redirect_status, method):
|
|
||||||
data = b'testdata' if method in ('POST', 'PUT') else None
|
|
||||||
res = ydl.urlopen(sanitized_Request(
|
|
||||||
f'http://127.0.0.1:{self.http_port}/redirect_{redirect_status}', method=method, data=data))
|
|
||||||
return res.read().decode('utf-8'), res.headers.get('method', '')
|
|
||||||
|
|
||||||
# A 303 must either use GET or HEAD for subsequent request
|
|
||||||
self.assertEqual(do_req(303, 'POST'), ('', 'GET'))
|
|
||||||
self.assertEqual(do_req(303, 'HEAD'), ('', 'HEAD'))
|
|
||||||
|
|
||||||
self.assertEqual(do_req(303, 'PUT'), ('', 'GET'))
|
|
||||||
|
|
||||||
# 301 and 302 turn POST only into a GET
|
|
||||||
self.assertEqual(do_req(301, 'POST'), ('', 'GET'))
|
|
||||||
self.assertEqual(do_req(301, 'HEAD'), ('', 'HEAD'))
|
|
||||||
self.assertEqual(do_req(302, 'POST'), ('', 'GET'))
|
|
||||||
self.assertEqual(do_req(302, 'HEAD'), ('', 'HEAD'))
|
|
||||||
|
|
||||||
self.assertEqual(do_req(301, 'PUT'), ('testdata', 'PUT'))
|
|
||||||
self.assertEqual(do_req(302, 'PUT'), ('testdata', 'PUT'))
|
|
||||||
|
|
||||||
# 307 and 308 should not change method
|
|
||||||
for m in ('POST', 'PUT'):
|
|
||||||
self.assertEqual(do_req(307, m), ('testdata', m))
|
|
||||||
self.assertEqual(do_req(308, m), ('testdata', m))
|
|
||||||
|
|
||||||
self.assertEqual(do_req(307, 'HEAD'), ('', 'HEAD'))
|
|
||||||
self.assertEqual(do_req(308, 'HEAD'), ('', 'HEAD'))
|
|
||||||
|
|
||||||
# These should not redirect and instead raise an HTTPError
|
|
||||||
for code in (300, 304, 305, 306):
|
|
||||||
with self.assertRaises(urllib.error.HTTPError):
|
|
||||||
do_req(code, 'GET')
|
|
||||||
|
|
||||||
def test_content_type(self):
    """A form-encoded data payload should auto-set the request method to POST
    and a form-urlencoded Content-Type, over both HTTPS and HTTP."""
    # https://github.com/yt-dlp/yt-dlp/commit/379a4f161d4ad3e40932dcf5aca6e6fb9715ab28
    with FakeYDL({'nocheckcertificate': True}) as ydl:
        # method should be auto-detected as POST
        r = sanitized_Request(f'https://localhost:{self.https_port}/headers', data=urlencode_postdata({'test': 'test'}))

        headers = ydl.urlopen(r).read().decode('utf-8')
        self.assertIn('Content-Type: application/x-www-form-urlencoded', headers)

        # test http
        r = sanitized_Request(f'http://localhost:{self.http_port}/headers', data=urlencode_postdata({'test': 'test'}))
        headers = ydl.urlopen(r).read().decode('utf-8')
        self.assertIn('Content-Type: application/x-www-form-urlencoded', headers)
def test_cookiejar(self):
    """A cookie placed in the YDL cookiejar must be sent with matching requests."""
    with FakeYDL() as ydl:
        # Positional http.cookiejar.Cookie args: version, name, value, port,
        # port_specified, domain, domain_specified, domain_initial_dot, path,
        # path_specified, secure, expires, discard, comment, comment_url, rest.
        ydl.cookiejar.set_cookie(http.cookiejar.Cookie(
            0, 'test', 'ytdlp', None, False, '127.0.0.1', True,
            False, '/headers', True, False, None, False, None, None, {}))
        data = ydl.urlopen(sanitized_Request(f'http://127.0.0.1:{self.http_port}/headers')).read()
        self.assertIn(b'Cookie: test=ytdlp', data)
def test_no_compression_compat_header(self):
    """The legacy 'Youtubedl-no-compression' header must be translated into
    'Accept-Encoding: identity' and stripped from the outgoing request."""
    with FakeYDL() as ydl:
        data = ydl.urlopen(
            sanitized_Request(
                f'http://127.0.0.1:{self.http_port}/headers',
                headers={'Youtubedl-no-compression': True})).read()
        self.assertIn(b'Accept-Encoding: identity', data)
        # The internal compat header itself must not leak to the server.
        self.assertNotIn(b'youtubedl-no-compression', data.lower())
def test_gzip_trailing_garbage(self):
    """Gzip responses with trailing garbage after the compressed stream
    must still decode to the intended payload."""
    # https://github.com/ytdl-org/youtube-dl/commit/aa3e950764337ef9800c936f4de89b31c00dfcf5
    # https://github.com/ytdl-org/youtube-dl/commit/6f2ec15cee79d35dba065677cad9da7491ec6e6f
    with FakeYDL() as ydl:
        data = ydl.urlopen(sanitized_Request(f'http://localhost:{self.http_port}/trailing_garbage')).read().decode('utf-8')
        self.assertEqual(data, '<html><video src="/vid.mp4" /></html>')
@unittest.skipUnless(brotli, 'brotli support is not installed')
def test_brotli(self):
    """Brotli-encoded responses must be decoded transparently.

    The 'ytdl-encoding' request header tells the test server which
    Content-Encoding to apply to its response.
    """
    with FakeYDL() as ydl:
        res = ydl.urlopen(
            sanitized_Request(
                f'http://127.0.0.1:{self.http_port}/content-encoding',
                headers={'ytdl-encoding': 'br'}))
        self.assertEqual(res.headers.get('Content-Encoding'), 'br')
        self.assertEqual(res.read(), b'<html><video src="/vid.mp4" /></html>')
def test_deflate(self):
    """Deflate-encoded responses must be decoded transparently."""
    with FakeYDL() as ydl:
        res = ydl.urlopen(
            sanitized_Request(
                f'http://127.0.0.1:{self.http_port}/content-encoding',
                headers={'ytdl-encoding': 'deflate'}))
        self.assertEqual(res.headers.get('Content-Encoding'), 'deflate')
        self.assertEqual(res.read(), b'<html><video src="/vid.mp4" /></html>')
def test_gzip(self):
    """Gzip-encoded responses must be decoded transparently."""
    with FakeYDL() as ydl:
        res = ydl.urlopen(
            sanitized_Request(
                f'http://127.0.0.1:{self.http_port}/content-encoding',
                headers={'ytdl-encoding': 'gzip'}))
        self.assertEqual(res.headers.get('Content-Encoding'), 'gzip')
        self.assertEqual(res.read(), b'<html><video src="/vid.mp4" /></html>')
def test_multiple_encodings(self):
    """Chained Content-Encodings (including repeats) must all be unwound."""
    # https://www.rfc-editor.org/rfc/rfc9110.html#section-8.4
    with FakeYDL() as ydl:
        for pair in ('gzip,deflate', 'deflate, gzip', 'gzip, gzip', 'deflate, deflate'):
            res = ydl.urlopen(
                sanitized_Request(
                    f'http://127.0.0.1:{self.http_port}/content-encoding',
                    headers={'ytdl-encoding': pair}))
            self.assertEqual(res.headers.get('Content-Encoding'), pair)
            self.assertEqual(res.read(), b'<html><video src="/vid.mp4" /></html>')
def test_unsupported_encoding(self):
    """An unrecognized Content-Encoding must pass the body through untouched."""
    # it should return the raw content
    with FakeYDL() as ydl:
        res = ydl.urlopen(
            sanitized_Request(
                f'http://127.0.0.1:{self.http_port}/content-encoding',
                headers={'ytdl-encoding': 'unsupported'}))
        self.assertEqual(res.headers.get('Content-Encoding'), 'unsupported')
        self.assertEqual(res.read(), b'raw')
class TestClientCert(unittest.TestCase):
    """Tests that client TLS certificates are presented correctly.

    A local HTTPS server is configured with CERT_REQUIRED against a test CA,
    so the assertions exercise the *server-side* verification of the client
    certificate supplied via the various client_certificate* options.
    """

    def setUp(self):
        # Server certificate and the CA used to verify client certificates.
        certfn = os.path.join(TEST_DIR, 'testcert.pem')
        self.certdir = os.path.join(TEST_DIR, 'testdata', 'certificate')
        cacertfn = os.path.join(self.certdir, 'ca.crt')
        self.httpd = http.server.HTTPServer(('127.0.0.1', 0), HTTPTestRequestHandler)
        sslctx = ssl.SSLContext(ssl.PROTOCOL_TLS_SERVER)
        # Require a client certificate signed by the test CA.
        sslctx.verify_mode = ssl.CERT_REQUIRED
        sslctx.load_verify_locations(cafile=cacertfn)
        sslctx.load_cert_chain(certfn, None)
        self.httpd.socket = sslctx.wrap_socket(self.httpd.socket, server_side=True)
        self.port = http_server_port(self.httpd)
        # Daemon thread so a failing test does not hang the test run.
        self.server_thread = threading.Thread(target=self.httpd.serve_forever)
        self.server_thread.daemon = True
        self.server_thread.start()

    def _run_test(self, **params):
        # Shared driver: extract from the TLS server with the given
        # client-certificate options and check extraction succeeded.
        ydl = YoutubeDL({
            'logger': FakeLogger(),
            # Disable client-side validation of unacceptable self-signed testcert.pem
            # The test is of a check on the server side, so unaffected
            'nocheckcertificate': True,
            **params,
        })
        r = ydl.extract_info(f'https://127.0.0.1:{self.port}/video.html')
        self.assertEqual(r['url'], f'https://127.0.0.1:{self.port}/vid.mp4')

    def test_certificate_combined_nopass(self):
        # Certificate + key combined in one file, no passphrase.
        self._run_test(client_certificate=os.path.join(self.certdir, 'clientwithkey.crt'))

    def test_certificate_nocombined_nopass(self):
        # Certificate and key in separate files, no passphrase.
        self._run_test(client_certificate=os.path.join(self.certdir, 'client.crt'),
                       client_certificate_key=os.path.join(self.certdir, 'client.key'))

    def test_certificate_combined_pass(self):
        # Combined file with an encrypted key; passphrase required.
        self._run_test(client_certificate=os.path.join(self.certdir, 'clientwithencryptedkey.crt'),
                       client_certificate_password='foobar')

    def test_certificate_nocombined_pass(self):
        # Separate files with an encrypted key; passphrase required.
        self._run_test(client_certificate=os.path.join(self.certdir, 'client.crt'),
                       client_certificate_key=os.path.join(self.certdir, 'clientencrypted.key'),
                       client_certificate_password='foobar')
def _build_proxy_handler(name):
|
|
||||||
class HTTPTestRequestHandler(http.server.BaseHTTPRequestHandler):
|
|
||||||
proxy_name = name
|
|
||||||
|
|
||||||
def log_message(self, format, *args):
|
|
||||||
pass
|
|
||||||
|
|
||||||
def do_GET(self):
|
|
||||||
self.send_response(200)
|
|
||||||
self.send_header('Content-Type', 'text/plain; charset=utf-8')
|
|
||||||
self.end_headers()
|
|
||||||
self.wfile.write(f'{self.proxy_name}: {self.path}'.encode())
|
|
||||||
return HTTPTestRequestHandler
|
|
||||||
|
|
||||||
|
|
||||||
class TestProxy(unittest.TestCase):
    """Tests proxy selection ('proxy', 'geo_verification_proxy',
    per-request 'Ytdl-request-proxy') and IDN handling through a proxy."""

    def setUp(self):
        # Primary proxy: echoes 'normal: <url>'.
        self.proxy = http.server.HTTPServer(
            ('127.0.0.1', 0), _build_proxy_handler('normal'))
        self.port = http_server_port(self.proxy)
        self.proxy_thread = threading.Thread(target=self.proxy.serve_forever)
        self.proxy_thread.daemon = True
        self.proxy_thread.start()

        # Geo-verification proxy: echoes 'geo: <url>'.
        self.geo_proxy = http.server.HTTPServer(
            ('127.0.0.1', 0), _build_proxy_handler('geo'))
        self.geo_port = http_server_port(self.geo_proxy)
        self.geo_proxy_thread = threading.Thread(target=self.geo_proxy.serve_forever)
        self.geo_proxy_thread.daemon = True
        self.geo_proxy_thread.start()

    def test_proxy(self):
        geo_proxy = f'127.0.0.1:{self.geo_port}'
        ydl = YoutubeDL({
            'proxy': f'127.0.0.1:{self.port}',
            'geo_verification_proxy': geo_proxy,
        })
        url = 'http://foo.com/bar'
        # Plain request goes through the configured 'proxy'.
        response = ydl.urlopen(url).read().decode()
        self.assertEqual(response, f'normal: {url}')

        # Per-request header overrides the proxy with the geo proxy.
        req = urllib.request.Request(url)
        req.add_header('Ytdl-request-proxy', geo_proxy)
        response = ydl.urlopen(req).read().decode()
        self.assertEqual(response, f'geo: {url}')

    def test_proxy_with_idn(self):
        ydl = YoutubeDL({
            'proxy': f'127.0.0.1:{self.port}',
        })
        url = 'http://中文.tw/'
        response = ydl.urlopen(url).read().decode()
        # b'xn--fiq228c' is '中文'.encode('idna')
        self.assertEqual(response, 'normal: http://xn--fiq228c.tw/')
class TestFileURL(unittest.TestCase):
    """file:// URLs are rejected by default and only work when opted in."""
    # See https://github.com/ytdl-org/youtube-dl/issues/8227
    def test_file_urls(self):
        # delete=False so the file survives close() for reading; unlinked at the end.
        tf = tempfile.NamedTemporaryFile(delete=False)
        tf.write(b'foobar')
        tf.close()
        url = pathlib.Path(tf.name).as_uri()
        # Default: file:// must be refused for security reasons.
        with FakeYDL() as ydl:
            self.assertRaisesRegex(
                urllib.error.URLError, 'file:// URLs are explicitly disabled in yt-dlp for security reasons', ydl.urlopen, url)
        # Opt-in: 'enable_file_urls' allows reading local files.
        with FakeYDL({'enable_file_urls': True}) as ydl:
            res = ydl.urlopen(url)
            self.assertEqual(res.read(), b'foobar')
            res.close()
        os.unlink(tf.name)
# Run this module's unittest-based tests when executed directly.
if __name__ == '__main__':
    unittest.main()
@ -0,0 +1,380 @@
|
|||||||
|
import abc
|
||||||
|
import base64
|
||||||
|
import contextlib
|
||||||
|
import functools
|
||||||
|
import json
|
||||||
|
import os
|
||||||
|
import random
|
||||||
|
import ssl
|
||||||
|
import threading
|
||||||
|
from http.server import BaseHTTPRequestHandler
|
||||||
|
from socketserver import ThreadingTCPServer
|
||||||
|
|
||||||
|
import pytest
|
||||||
|
|
||||||
|
from test.helper import http_server_port, verify_address_availability
|
||||||
|
from test.test_networking import TEST_DIR
|
||||||
|
from test.test_socks import IPv6ThreadingTCPServer
|
||||||
|
from yt_dlp.dependencies import urllib3
|
||||||
|
from yt_dlp.networking import Request
|
||||||
|
from yt_dlp.networking.exceptions import HTTPError, ProxyError, SSLError
|
||||||
|
|
||||||
|
|
||||||
|
class HTTPProxyAuthMixin:
    """Mixin for BaseHTTPRequestHandler subclasses implementing HTTP Basic
    proxy authentication (Proxy-Authorization / 407 challenge)."""

    def proxy_auth_error(self):
        # Send a 407 challenge and signal failure to the caller.
        self.send_response(407)
        self.send_header('Proxy-Authenticate', 'Basic realm="test http proxy"')
        self.end_headers()
        return False

    def do_proxy_auth(self, username, password):
        """Return True if the request is authorized; otherwise send a 407
        challenge and return False.

        With both username and password None, authentication is disabled
        and every request is accepted.
        """
        if username is None and password is None:
            return True

        proxy_auth_header = self.headers.get('Proxy-Authorization', None)
        if proxy_auth_header is None:
            return self.proxy_auth_error()

        # Only the Basic scheme is supported.
        if not proxy_auth_header.startswith('Basic '):
            return self.proxy_auth_error()

        auth = proxy_auth_header[6:]

        try:
            # Credentials are base64('user:pass'); split on the first colon
            # so passwords may contain ':'.
            auth_username, auth_password = base64.b64decode(auth).decode().split(':', 1)
        except Exception:
            # Malformed base64 / missing colon — treat as bad credentials.
            return self.proxy_auth_error()

        # None is normalized to '' so e.g. username-only auth works.
        if auth_username != (username or '') or auth_password != (password or ''):
            return self.proxy_auth_error()
        return True
class HTTPProxyHandler(BaseHTTPRequestHandler, HTTPProxyAuthMixin):
    """Plain (non-CONNECT) HTTP proxy for tests.

    GET requests ending in /proxy_info are answered with a JSON description
    of what the proxy observed; everything else gets a 404.
    """

    def __init__(self, *args, proxy_info=None, username=None, password=None, request_handler=None, **kwargs):
        # username/password enable Basic proxy auth (see HTTPProxyAuthMixin).
        self.username = username
        self.password = password
        # When set, returned verbatim from /proxy_info instead of the
        # locally observed request details.
        self.proxy_info = proxy_info
        # NOTE: request_handler is accepted (for interface parity with
        # HTTPConnectProxyHandler) but not used here.
        super().__init__(*args, **kwargs)

    def do_GET(self):
        if not self.do_proxy_auth(self.username, self.password):
            self.server.close_request(self.request)
            return
        if self.path.endswith('/proxy_info'):
            payload = json.dumps(self.proxy_info or {
                'client_address': self.client_address,
                'connect': False,
                'connect_host': None,
                'connect_port': None,
                'headers': dict(self.headers),
                'path': self.path,
                'proxy': ':'.join(str(y) for y in self.connection.getsockname()),
            })
            self.send_response(200)
            self.send_header('Content-Type', 'application/json; charset=utf-8')
            self.send_header('Content-Length', str(len(payload)))
            self.end_headers()
            self.wfile.write(payload.encode())
        else:
            self.send_response(404)
            self.end_headers()

        # One response per connection: close it afterwards.
        self.server.close_request(self.request)
# SSLTransport is only available when urllib3 is installed; tests that need
# it are skipped otherwise (SSLTransport = None).
if urllib3:
    import urllib3.util.ssltransport

    class SSLTransport(urllib3.util.ssltransport.SSLTransport):
        """
        Modified version of urllib3 SSLTransport to support server side SSL

        This allows us to chain multiple TLS connections.
        """

        def __init__(self, socket, ssl_context, server_hostname=None, suppress_ragged_eofs=True, server_side=False):
            # In-memory BIOs shuttle TLS bytes between the wrapped socket and
            # the SSL object, avoiding a second OS-level socket wrap.
            self.incoming = ssl.MemoryBIO()
            self.outgoing = ssl.MemoryBIO()

            self.suppress_ragged_eofs = suppress_ragged_eofs
            self.socket = socket

            # Unlike the upstream class, server_side is forwarded so this
            # transport can also *accept* TLS connections.
            self.sslobj = ssl_context.wrap_bio(
                self.incoming,
                self.outgoing,
                server_hostname=server_hostname,
                server_side=server_side,
            )
            # Drive the handshake through the BIOs before any payload I/O.
            self._ssl_io_loop(self.sslobj.do_handshake)

        @property
        def _io_refs(self):
            # Delegate CPython's socket refcounting to the underlying socket
            # so close() bookkeeping stays consistent.
            return self.socket._io_refs

        @_io_refs.setter
        def _io_refs(self, value):
            self.socket._io_refs = value

        def shutdown(self, *args, **kwargs):
            # Forward shutdown to the real socket.
            self.socket.shutdown(*args, **kwargs)
else:
    SSLTransport = None
class HTTPSProxyHandler(HTTPProxyHandler):
    """HTTPProxyHandler that speaks TLS to the proxy client."""

    def __init__(self, request, *args, **kwargs):
        certfn = os.path.join(TEST_DIR, 'testcert.pem')
        sslctx = ssl.SSLContext(ssl.PROTOCOL_TLS_SERVER)
        sslctx.load_cert_chain(certfn, None)
        if isinstance(request, ssl.SSLSocket):
            # Already TLS (e.g. inside an HTTPS CONNECT tunnel): layer a
            # second TLS session on top via SSLTransport.
            request = SSLTransport(request, ssl_context=sslctx, server_side=True)
        else:
            request = sslctx.wrap_socket(request, server_side=True)
        super().__init__(request, *args, **kwargs)
class HTTPConnectProxyHandler(BaseHTTPRequestHandler, HTTPProxyAuthMixin):
    """HTTP CONNECT proxy: accepts the tunnel, then hands the raw connection
    to `request_handler` (with proxy_info describing the CONNECT)."""

    # HTTP/1.1 keeps the connection open after the 200 so the tunnel works.
    protocol_version = 'HTTP/1.1'
    default_request_version = 'HTTP/1.1'

    def __init__(self, *args, username=None, password=None, request_handler=None, **kwargs):
        self.username = username
        self.password = password
        # Handler class that serves the tunneled connection (e.g. HTTPProxyHandler).
        self.request_handler = request_handler
        super().__init__(*args, **kwargs)

    def do_CONNECT(self):
        if not self.do_proxy_auth(self.username, self.password):
            self.server.close_request(self.request)
            return
        # Accept the tunnel.
        self.send_response(200)
        self.end_headers()
        proxy_info = {
            'client_address': self.client_address,
            'connect': True,
            # For CONNECT, self.path is 'host:port'.
            'connect_host': self.path.split(':')[0],
            'connect_port': int(self.path.split(':')[1]),
            'headers': dict(self.headers),
            'path': self.path,
            'proxy': ':'.join(str(y) for y in self.connection.getsockname()),
        }
        # Serve the tunneled request on the same socket, passing along what
        # the proxy observed.
        self.request_handler(self.request, self.client_address, self.server, proxy_info=proxy_info)
        self.server.close_request(self.request)
class HTTPSConnectProxyHandler(HTTPConnectProxyHandler):
    """CONNECT proxy that itself speaks TLS to the proxy client."""

    def __init__(self, request, *args, **kwargs):
        certfn = os.path.join(TEST_DIR, 'testcert.pem')
        sslctx = ssl.SSLContext(ssl.PROTOCOL_TLS_SERVER)
        sslctx.load_cert_chain(certfn, None)
        request = sslctx.wrap_socket(request, server_side=True)
        # Keep a reference to the TLS-wrapped socket so it can be closed
        # after the tunnel finishes.
        self._original_request = request
        super().__init__(request, *args, **kwargs)

    def do_CONNECT(self):
        super().do_CONNECT()
        self.server.close_request(self._original_request)
@contextlib.contextmanager
def proxy_server(proxy_server_class, request_handler, bind_ip=None, **proxy_server_kwargs):
    """Context manager running a threaded proxy server for the duration of a test.

    Yields the server address as 'host:port' (IPv6 hosts are bracketed).
    `proxy_server_class` is the handler class for the proxy itself;
    `request_handler` serves the proxied/tunneled requests.
    """
    server = server_thread = None
    try:
        bind_address = bind_ip or '127.0.0.1'
        # A dotted address means IPv4; anything else is treated as IPv6.
        server_type = ThreadingTCPServer if '.' in bind_address else IPv6ThreadingTCPServer
        server = server_type(
            (bind_address, 0), functools.partial(proxy_server_class, request_handler=request_handler, **proxy_server_kwargs))
        server_port = http_server_port(server)
        server_thread = threading.Thread(target=server.serve_forever)
        server_thread.daemon = True
        server_thread.start()
        if '.' not in bind_address:
            yield f'[{bind_address}]:{server_port}'
        else:
            yield f'{bind_address}:{server_port}'
    finally:
        # Guard the cleanup: if server construction raised, `server` and
        # `server_thread` are still None and unconditional cleanup would
        # raise AttributeError, masking the original exception.
        if server is not None:
            server.shutdown()
            server.server_close()
        if server_thread is not None:
            server_thread.join(2.0)
class HTTPProxyTestContext(abc.ABC):
    """Abstract test context binding a proxy handler class to a request scheme."""

    # Handler class serving the proxied requests (set by subclasses).
    REQUEST_HANDLER_CLASS = None
    # Scheme ('http'/'https') used for requests made through the proxy.
    REQUEST_PROTO = None

    def http_server(self, server_class, *args, **kwargs):
        # Spin up a proxy server wired to this context's request handler.
        return proxy_server(server_class, self.REQUEST_HANDLER_CLASS, *args, **kwargs)

    @abc.abstractmethod
    def proxy_info_request(self, handler, target_domain=None, target_port=None, **req_kwargs) -> dict:
        """return a dict of proxy_info"""
class HTTPProxyHTTPTestContext(HTTPProxyTestContext):
    # Standard HTTP Proxy for http requests
    REQUEST_HANDLER_CLASS = HTTPProxyHandler
    REQUEST_PROTO = 'http'

    def proxy_info_request(self, handler, target_domain=None, target_port=None, **req_kwargs):
        # Target host/port are arbitrary — the proxy intercepts the request
        # and answers /proxy_info itself.
        request = Request(f'http://{target_domain or "127.0.0.1"}:{target_port or "40000"}/proxy_info', **req_kwargs)
        handler.validate(request)
        return json.loads(handler.send(request).read().decode())
class HTTPProxyHTTPSTestContext(HTTPProxyTestContext):
    # HTTP Connect proxy, for https requests
    REQUEST_HANDLER_CLASS = HTTPSProxyHandler
    REQUEST_PROTO = 'https'

    def proxy_info_request(self, handler, target_domain=None, target_port=None, **req_kwargs):
        # https scheme forces the handler to establish a CONNECT tunnel.
        request = Request(f'https://{target_domain or "127.0.0.1"}:{target_port or "40000"}/proxy_info', **req_kwargs)
        handler.validate(request)
        return json.loads(handler.send(request).read().decode())
# Maps the pytest `ctx` parameter value to its test-context class.
CTX_MAP = {
    'http': HTTPProxyHTTPTestContext,
    'https': HTTPProxyHTTPSTestContext,
}
@pytest.fixture(scope='module')
def ctx(request):
    # Indirect parametrization: request.param is a CTX_MAP key ('http'/'https').
    return CTX_MAP[request.param]()
@pytest.mark.parametrize(
    'handler', ['Urllib', 'Requests', 'CurlCFFI'], indirect=True)
@pytest.mark.parametrize('ctx', ['http'], indirect=True)  # pure http proxy can only support http
class TestHTTPProxy:
    """Tests for plain (non-CONNECT) HTTP proxying across request handlers."""

    def test_http_no_auth(self, handler, ctx):
        with ctx.http_server(HTTPProxyHandler) as server_address:
            with handler(proxies={ctx.REQUEST_PROTO: f'http://{server_address}'}) as rh:
                proxy_info = ctx.proxy_info_request(rh)
                assert proxy_info['proxy'] == server_address
                assert proxy_info['connect'] is False
                assert 'Proxy-Authorization' not in proxy_info['headers']

    def test_http_auth(self, handler, ctx):
        # Credentials embedded in the proxy URL must be sent as
        # Proxy-Authorization.
        with ctx.http_server(HTTPProxyHandler, username='test', password='test') as server_address:
            with handler(proxies={ctx.REQUEST_PROTO: f'http://test:test@{server_address}'}) as rh:
                proxy_info = ctx.proxy_info_request(rh)
                assert proxy_info['proxy'] == server_address
                assert 'Proxy-Authorization' in proxy_info['headers']

    def test_http_bad_auth(self, handler, ctx):
        # Wrong credentials surface as an HTTPError with status 407.
        with ctx.http_server(HTTPProxyHandler, username='test', password='test') as server_address:
            with handler(proxies={ctx.REQUEST_PROTO: f'http://test:bad@{server_address}'}) as rh:
                with pytest.raises(HTTPError) as exc_info:
                    ctx.proxy_info_request(rh)
                assert exc_info.value.response.status == 407
                exc_info.value.response.close()

    def test_http_source_address(self, handler, ctx):
        # source_address must be honoured for the proxy connection too.
        with ctx.http_server(HTTPProxyHandler) as server_address:
            source_address = f'127.0.0.{random.randint(5, 255)}'
            verify_address_availability(source_address)
            with handler(proxies={ctx.REQUEST_PROTO: f'http://{server_address}'},
                         source_address=source_address) as rh:
                proxy_info = ctx.proxy_info_request(rh)
                assert proxy_info['proxy'] == server_address
                assert proxy_info['client_address'][0] == source_address

    @pytest.mark.skip_handler('Urllib', 'urllib does not support https proxies')
    def test_https(self, handler, ctx):
        # https:// proxy URL: TLS to the proxy itself, still non-CONNECT.
        with ctx.http_server(HTTPSProxyHandler) as server_address:
            with handler(verify=False, proxies={ctx.REQUEST_PROTO: f'https://{server_address}'}) as rh:
                proxy_info = ctx.proxy_info_request(rh)
                assert proxy_info['proxy'] == server_address
                assert proxy_info['connect'] is False
                assert 'Proxy-Authorization' not in proxy_info['headers']

    @pytest.mark.skip_handler('Urllib', 'urllib does not support https proxies')
    def test_https_verify_failed(self, handler, ctx):
        # The proxy presents a self-signed cert, so verification must fail.
        with ctx.http_server(HTTPSProxyHandler) as server_address:
            with handler(verify=True, proxies={ctx.REQUEST_PROTO: f'https://{server_address}'}) as rh:
                # Accept SSLError as may not be feasible to tell if it is proxy or request error.
                # note: if request proto also does ssl verification, this may also be the error of the request.
                # Until we can support passing custom cacerts to handlers, we cannot properly test this for all cases.
                with pytest.raises((ProxyError, SSLError)):
                    ctx.proxy_info_request(rh)

    def test_http_with_idn(self, handler, ctx):
        # IDN target hosts must be punycoded in both path and Host header.
        with ctx.http_server(HTTPProxyHandler) as server_address:
            with handler(proxies={ctx.REQUEST_PROTO: f'http://{server_address}'}) as rh:
                proxy_info = ctx.proxy_info_request(rh, target_domain='中文.tw')
                assert proxy_info['proxy'] == server_address
                assert proxy_info['path'].startswith('http://xn--fiq228c.tw')
                assert proxy_info['headers']['Host'].split(':', 1)[0] == 'xn--fiq228c.tw'
@pytest.mark.parametrize(
    'handler,ctx', [
        ('Requests', 'https'),
        ('CurlCFFI', 'https'),
    ], indirect=True)
class TestHTTPConnectProxy:
    """Tests for HTTP CONNECT tunneling (https requests through a proxy)."""

    def test_http_connect_no_auth(self, handler, ctx):
        with ctx.http_server(HTTPConnectProxyHandler) as server_address:
            with handler(verify=False, proxies={ctx.REQUEST_PROTO: f'http://{server_address}'}) as rh:
                proxy_info = ctx.proxy_info_request(rh)
                assert proxy_info['proxy'] == server_address
                assert proxy_info['connect'] is True
                assert 'Proxy-Authorization' not in proxy_info['headers']

    def test_http_connect_auth(self, handler, ctx):
        with ctx.http_server(HTTPConnectProxyHandler, username='test', password='test') as server_address:
            with handler(verify=False, proxies={ctx.REQUEST_PROTO: f'http://test:test@{server_address}'}) as rh:
                proxy_info = ctx.proxy_info_request(rh)
                assert proxy_info['proxy'] == server_address
                assert 'Proxy-Authorization' in proxy_info['headers']

    @pytest.mark.skip_handler(
        'Requests',
        'bug in urllib3 causes unclosed socket: https://github.com/urllib3/urllib3/issues/3374',
    )
    def test_http_connect_bad_auth(self, handler, ctx):
        # A rejected CONNECT surfaces as a ProxyError (no tunnel established).
        with ctx.http_server(HTTPConnectProxyHandler, username='test', password='test') as server_address:
            with handler(verify=False, proxies={ctx.REQUEST_PROTO: f'http://test:bad@{server_address}'}) as rh:
                with pytest.raises(ProxyError):
                    ctx.proxy_info_request(rh)

    def test_http_connect_source_address(self, handler, ctx):
        # source_address must apply to the connection made to the proxy.
        with ctx.http_server(HTTPConnectProxyHandler) as server_address:
            source_address = f'127.0.0.{random.randint(5, 255)}'
            verify_address_availability(source_address)
            with handler(proxies={ctx.REQUEST_PROTO: f'http://{server_address}'},
                         source_address=source_address,
                         verify=False) as rh:
                proxy_info = ctx.proxy_info_request(rh)
                assert proxy_info['proxy'] == server_address
                assert proxy_info['client_address'][0] == source_address

    @pytest.mark.skipif(urllib3 is None, reason='requires urllib3 to test')
    def test_https_connect_proxy(self, handler, ctx):
        # TLS to the proxy itself, then CONNECT; needs SSLTransport (urllib3).
        with ctx.http_server(HTTPSConnectProxyHandler) as server_address:
            with handler(verify=False, proxies={ctx.REQUEST_PROTO: f'https://{server_address}'}) as rh:
                proxy_info = ctx.proxy_info_request(rh)
                assert proxy_info['proxy'] == server_address
                assert proxy_info['connect'] is True
                assert 'Proxy-Authorization' not in proxy_info['headers']

    @pytest.mark.skipif(urllib3 is None, reason='requires urllib3 to test')
    def test_https_connect_verify_failed(self, handler, ctx):
        # The proxy presents a self-signed cert, so verification must fail.
        with ctx.http_server(HTTPSConnectProxyHandler) as server_address:
            with handler(verify=True, proxies={ctx.REQUEST_PROTO: f'https://{server_address}'}) as rh:
                # Accept SSLError as may not be feasible to tell if it is proxy or request error.
                # note: if request proto also does ssl verification, this may also be the error of the request.
                # Until we can support passing custom cacerts to handlers, we cannot properly test this for all cases.
                with pytest.raises((ProxyError, SSLError)):
                    ctx.proxy_info_request(rh)

    @pytest.mark.skipif(urllib3 is None, reason='requires urllib3 to test')
    def test_https_connect_proxy_auth(self, handler, ctx):
        with ctx.http_server(HTTPSConnectProxyHandler, username='test', password='test') as server_address:
            with handler(verify=False, proxies={ctx.REQUEST_PROTO: f'https://test:test@{server_address}'}) as rh:
                proxy_info = ctx.proxy_info_request(rh)
                assert proxy_info['proxy'] == server_address
                assert 'Proxy-Authorization' in proxy_info['headers']
File diff suppressed because it is too large
Load Diff
@ -0,0 +1,208 @@
|
|||||||
|
#!/usr/bin/env python3
|
||||||
|
|
||||||
|
# Allow direct execution
|
||||||
|
import os
|
||||||
|
import sys
|
||||||
|
|
||||||
|
import pytest
|
||||||
|
|
||||||
|
sys.path.insert(0, os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
|
||||||
|
|
||||||
|
import io
|
||||||
|
import random
|
||||||
|
import ssl
|
||||||
|
|
||||||
|
from yt_dlp.cookies import YoutubeDLCookieJar
|
||||||
|
from yt_dlp.dependencies import certifi
|
||||||
|
from yt_dlp.networking import Response
|
||||||
|
from yt_dlp.networking._helper import (
|
||||||
|
InstanceStoreMixin,
|
||||||
|
add_accept_encoding_header,
|
||||||
|
get_redirect_method,
|
||||||
|
make_socks_proxy_opts,
|
||||||
|
select_proxy,
|
||||||
|
ssl_load_certs,
|
||||||
|
)
|
||||||
|
from yt_dlp.networking.exceptions import (
|
||||||
|
HTTPError,
|
||||||
|
IncompleteRead,
|
||||||
|
)
|
||||||
|
from yt_dlp.socks import ProxyType
|
||||||
|
from yt_dlp.utils.networking import HTTPHeaderDict
|
||||||
|
|
||||||
|
TEST_DIR = os.path.dirname(os.path.abspath(__file__))
|
||||||
|
|
||||||
|
|
||||||
|
class TestNetworkingUtils:
    """Unit tests for the pure helpers in yt_dlp.networking._helper."""

    def test_select_proxy(self):
        proxies = {
            'all': 'socks5://example.com',
            'http': 'http://example.com:1080',
            'no': 'bypass.example.com,yt-dl.org',
        }

        # Scheme-specific entry wins over 'all'; 'no' entries bypass entirely.
        assert select_proxy('https://example.com', proxies) == proxies['all']
        assert select_proxy('http://example.com', proxies) == proxies['http']
        assert select_proxy('http://bypass.example.com', proxies) is None
        assert select_proxy('https://yt-dl.org', proxies) is None

    @pytest.mark.parametrize('socks_proxy,expected', [
        # socks5h: remote DNS (rdns=True); default port 1080.
        ('socks5h://example.com', {
            'proxytype': ProxyType.SOCKS5,
            'addr': 'example.com',
            'port': 1080,
            'rdns': True,
            'username': None,
            'password': None,
        }),
        # Explicit empty password is preserved as ''.
        ('socks5://user:@example.com:5555', {
            'proxytype': ProxyType.SOCKS5,
            'addr': 'example.com',
            'port': 5555,
            'rdns': False,
            'username': 'user',
            'password': '',
        }),
        # Percent-encoded credentials are decoded.
        ('socks4://u%40ser:pa%20ss@127.0.0.1:1080', {
            'proxytype': ProxyType.SOCKS4,
            'addr': '127.0.0.1',
            'port': 1080,
            'rdns': False,
            'username': 'u@ser',
            'password': 'pa ss',
        }),
        # socks4a implies remote DNS.
        ('socks4a://:pa%20ss@127.0.0.1', {
            'proxytype': ProxyType.SOCKS4A,
            'addr': '127.0.0.1',
            'port': 1080,
            'rdns': True,
            'username': '',
            'password': 'pa ss',
        }),
    ])
    def test_make_socks_proxy_opts(self, socks_proxy, expected):
        assert make_socks_proxy_opts(socks_proxy) == expected

    def test_make_socks_proxy_unknown(self):
        with pytest.raises(ValueError, match='Unknown SOCKS proxy version: socks'):
            make_socks_proxy_opts('socks://127.0.0.1')

    @pytest.mark.skipif(not certifi, reason='certifi is not installed')
    def test_load_certifi(self):
        # use_certifi=True must load exactly certifi's CA bundle.
        context_certifi = ssl.SSLContext(ssl.PROTOCOL_TLS_CLIENT)
        context_certifi.load_verify_locations(cafile=certifi.where())
        context = ssl.SSLContext(ssl.PROTOCOL_TLS_CLIENT)
        ssl_load_certs(context, use_certifi=True)
        assert context.get_ca_certs() == context_certifi.get_ca_certs()

        # use_certifi=False must load the system default CAs.
        context_default = ssl.SSLContext(ssl.PROTOCOL_TLS_CLIENT)
        context_default.load_default_certs()
        context = ssl.SSLContext(ssl.PROTOCOL_TLS_CLIENT)
        ssl_load_certs(context, use_certifi=False)
        assert context.get_ca_certs() == context_default.get_ca_certs()

        if context_default.get_ca_certs() == context_certifi.get_ca_certs():
            pytest.skip('System uses certifi as default. The test is not valid')

    @pytest.mark.parametrize('method,status,expected', [
        ('GET', 303, 'GET'),
        ('HEAD', 303, 'HEAD'),
        ('PUT', 303, 'GET'),
        ('POST', 301, 'GET'),
        ('HEAD', 301, 'HEAD'),
        ('POST', 302, 'GET'),
        ('HEAD', 302, 'HEAD'),
        ('PUT', 302, 'PUT'),
        ('POST', 308, 'POST'),
        ('POST', 307, 'POST'),
        ('HEAD', 308, 'HEAD'),
        ('HEAD', 307, 'HEAD'),
    ])
    def test_get_redirect_method(self, method, status, expected):
        assert get_redirect_method(method, status) == expected

    @pytest.mark.parametrize('headers,supported_encodings,expected', [
        # Existing Accept-Encoding is left untouched.
        ({'Accept-Encoding': 'br'}, ['gzip', 'br'], {'Accept-Encoding': 'br'}),
        # Otherwise the supported encodings are joined in.
        ({}, ['gzip', 'br'], {'Accept-Encoding': 'gzip, br'}),
        # No supported encodings -> explicit 'identity'.
        ({'Content-type': 'application/json'}, [], {'Content-type': 'application/json', 'Accept-Encoding': 'identity'}),
    ])
    def test_add_accept_encoding_header(self, headers, supported_encodings, expected):
        headers = HTTPHeaderDict(headers)
        add_accept_encoding_header(headers, supported_encodings)
        assert headers == HTTPHeaderDict(expected)
class TestInstanceStoreMixin:
    """Tests for `InstanceStoreMixin` caching of instances by construction kwargs."""

    class FakeInstanceStoreMixin(InstanceStoreMixin):
        def _create_instance(self, **kwargs):
            # Random value so two *distinct* creations are (almost) never equal
            return random.randint(0, 1000000)

        def _close_instance(self, instance):
            pass

    def test_mixin(self):
        mixin = self.FakeInstanceStoreMixin()
        # Identical kwargs must hit the cache and return the same instance
        assert mixin._get_instance(d={'a': 1, 'b': 2, 'c': {'d', 4}}) == mixin._get_instance(d={'a': 1, 'b': 2, 'c': {'d', 4}})

        assert mixin._get_instance(d={'a': 1, 'b': 2, 'c': {'e', 4}}) != mixin._get_instance(d={'a': 1, 'b': 2, 'c': {'d', 4}})

        # BUGFIX: the original misplaced a parenthesis so the `!=` comparison was
        # *inside* the call arguments (`_get_instance(d={...} != _get_instance(...))`),
        # which only asserted the returned random int was truthy (and could even
        # flake when randint returned 0) instead of comparing the two instances.
        assert mixin._get_instance(d={'a': 1, 'b': 2, 'c': {'d', 4}}) != mixin._get_instance(d={'a': 1, 'b': 2, 'g': {'d', 4}})

        assert mixin._get_instance(d={'a': 1}, e=[1, 2, 3]) == mixin._get_instance(d={'a': 1}, e=[1, 2, 3])

        assert mixin._get_instance(d={'a': 1}, e=[1, 2, 3]) != mixin._get_instance(d={'a': 1}, e=[1, 2, 3, 4])

        cookiejar = YoutubeDLCookieJar()
        assert mixin._get_instance(b=[1, 2], c=cookiejar) == mixin._get_instance(b=[1, 2], c=cookiejar)

        assert mixin._get_instance(b=[1, 2], c=cookiejar) != mixin._get_instance(b=[1, 2], c=YoutubeDLCookieJar())

        # Keyword order must not affect the cache key
        assert mixin._get_instance(c=cookiejar, b=[1, 2]) == mixin._get_instance(b=[1, 2], c=cookiejar)

        m = mixin._get_instance(t=1234)
        assert mixin._get_instance(t=1234) == m
        # After clearing, a fresh instance must be created for the same kwargs
        mixin._clear_instances()
        assert mixin._get_instance(t=1234) != m
||||||
|
|
||||||
|
class TestNetworkingExceptions:
|
||||||
|
|
||||||
|
@staticmethod
|
||||||
|
def create_response(status):
|
||||||
|
return Response(fp=io.BytesIO(b'test'), url='http://example.com', headers={'tesT': 'test'}, status=status)
|
||||||
|
|
||||||
|
def test_http_error(self):
|
||||||
|
|
||||||
|
response = self.create_response(403)
|
||||||
|
error = HTTPError(response)
|
||||||
|
|
||||||
|
assert error.status == 403
|
||||||
|
assert str(error) == error.msg == 'HTTP Error 403: Forbidden'
|
||||||
|
assert error.reason == response.reason
|
||||||
|
assert error.response is response
|
||||||
|
|
||||||
|
data = error.response.read()
|
||||||
|
assert data == b'test'
|
||||||
|
assert repr(error) == '<HTTPError 403: Forbidden>'
|
||||||
|
|
||||||
|
def test_redirect_http_error(self):
|
||||||
|
response = self.create_response(301)
|
||||||
|
error = HTTPError(response, redirect_loop=True)
|
||||||
|
assert str(error) == error.msg == 'HTTP Error 301: Moved Permanently (redirect loop detected)'
|
||||||
|
assert error.reason == 'Moved Permanently'
|
||||||
|
|
||||||
|
def test_incomplete_read_error(self):
|
||||||
|
error = IncompleteRead(4, 3, cause='test')
|
||||||
|
assert isinstance(error, IncompleteRead)
|
||||||
|
assert repr(error) == '<IncompleteRead: 4 bytes read, 3 more expected>'
|
||||||
|
assert str(error) == error.msg == '4 bytes read, 3 more expected'
|
||||||
|
assert error.partial == 4
|
||||||
|
assert error.expected == 3
|
||||||
|
assert error.cause == 'test'
|
||||||
|
|
||||||
|
error = IncompleteRead(3)
|
||||||
|
assert repr(error) == '<IncompleteRead: 3 bytes read>'
|
||||||
|
assert str(error) == '3 bytes read'
|
@ -0,0 +1,444 @@
|
|||||||
|
import http.cookies
|
||||||
|
import re
|
||||||
|
import xml.etree.ElementTree
|
||||||
|
|
||||||
|
import pytest
|
||||||
|
|
||||||
|
from yt_dlp.utils import dict_get, int_or_none, str_or_none
|
||||||
|
from yt_dlp.utils.traversal import traverse_obj
|
||||||
|
|
||||||
|
_TEST_DATA = {
|
||||||
|
100: 100,
|
||||||
|
1.2: 1.2,
|
||||||
|
'str': 'str',
|
||||||
|
'None': None,
|
||||||
|
'...': ...,
|
||||||
|
'urls': [
|
||||||
|
{'index': 0, 'url': 'https://www.example.com/0'},
|
||||||
|
{'index': 1, 'url': 'https://www.example.com/1'},
|
||||||
|
],
|
||||||
|
'data': (
|
||||||
|
{'index': 2},
|
||||||
|
{'index': 3},
|
||||||
|
),
|
||||||
|
'dict': {},
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
class TestTraversal:
    """Behavioral tests for `traverse_obj` path resolution and filtering.

    Fixes several typos in the assert messages of the original
    ('catched', 'cirquit', 'tripple', 'fuction', 'nonexistant');
    the traversal logic under test is unchanged.
    """

    def test_traversal_base(self):
        """Basic path forms: tuple/list/iterable/scalar keys and `None`."""
        assert traverse_obj(_TEST_DATA, ('str',)) == 'str', \
            'allow tuple path'
        assert traverse_obj(_TEST_DATA, ['str']) == 'str', \
            'allow list path'
        assert traverse_obj(_TEST_DATA, (value for value in ('str',))) == 'str', \
            'allow iterable path'
        assert traverse_obj(_TEST_DATA, 'str') == 'str', \
            'single items should be treated as a path'
        assert traverse_obj(_TEST_DATA, 100) == 100, \
            'allow int path'
        assert traverse_obj(_TEST_DATA, 1.2) == 1.2, \
            'allow float path'
        assert traverse_obj(_TEST_DATA, None) == _TEST_DATA, \
            '`None` should not perform any modification'

    def test_traversal_ellipsis(self):
        """`...` branches into every child value."""
        assert traverse_obj(_TEST_DATA, ...) == [x for x in _TEST_DATA.values() if x not in (None, {})], \
            '`...` should give all non discarded values'
        assert traverse_obj(_TEST_DATA, ('urls', 0, ...)) == list(_TEST_DATA['urls'][0].values()), \
            '`...` selection for dicts should select all values'
        assert traverse_obj(_TEST_DATA, (..., ..., 'url')) == ['https://www.example.com/0', 'https://www.example.com/1'], \
            'nested `...` queries should work'
        assert traverse_obj(_TEST_DATA, (..., ..., 'index')) == list(range(4)), \
            '`...` query result should be flattened'
        assert traverse_obj(iter(range(4)), ...) == list(range(4)), \
            '`...` should accept iterables'

    def test_traversal_function(self):
        """Callables as keys act as `(key, value)` filters."""
        filter_func = lambda x, y: x == 'urls' and isinstance(y, list)
        assert traverse_obj(_TEST_DATA, filter_func) == [_TEST_DATA['urls']], \
            'function as query key should perform a filter based on (key, value)'
        assert traverse_obj(_TEST_DATA, lambda _, x: isinstance(x[0], str)) == ['str'], \
            'exceptions in the query function should be caught'
        assert traverse_obj(iter(range(4)), lambda _, x: x % 2 == 0) == [0, 2], \
            'function key should accept iterables'
        # Wrong function signature should raise (debug mode)
        with pytest.raises(Exception):
            traverse_obj(_TEST_DATA, lambda a: ...)
        with pytest.raises(Exception):
            traverse_obj(_TEST_DATA, lambda a, b, c: ...)

    def test_traversal_set(self):
        """Sets in a path act as transformations or type filters."""
        # transformation/type, like `expected_type`
        assert traverse_obj(_TEST_DATA, (..., {str.upper})) == ['STR'], \
            'Function in set should be a transformation'
        assert traverse_obj(_TEST_DATA, (..., {str})) == ['str'], \
            'Type in set should be a type filter'
        assert traverse_obj(_TEST_DATA, (..., {str, int})) == [100, 'str'], \
            'Multiple types in set should be a type filter'
        assert traverse_obj(_TEST_DATA, {dict}) == _TEST_DATA, \
            'A single set should be wrapped into a path'
        assert traverse_obj(_TEST_DATA, (..., {str.upper})) == ['STR'], \
            'Transformation function should not raise'
        expected = [x for x in map(str_or_none, _TEST_DATA.values()) if x is not None]
        assert traverse_obj(_TEST_DATA, (..., {str_or_none})) == expected, \
            'Function in set should be a transformation'
        assert traverse_obj(_TEST_DATA, ('fail', {lambda _: 'const'})) == 'const', \
            'Function in set should always be called'
        # Sets with length < 1 or > 1 not including only types should raise
        with pytest.raises(Exception):
            traverse_obj(_TEST_DATA, set())
        with pytest.raises(Exception):
            traverse_obj(_TEST_DATA, {str.upper, str})

    def test_traversal_slice(self):
        """`slice` keys index sequences like normal slicing."""
        _SLICE_DATA = [0, 1, 2, 3, 4]

        assert traverse_obj(_TEST_DATA, ('dict', slice(1))) is None, \
            'slice on a dictionary should not throw'
        assert traverse_obj(_SLICE_DATA, slice(1)) == _SLICE_DATA[:1], \
            'slice key should apply slice to sequence'
        assert traverse_obj(_SLICE_DATA, slice(1, 2)) == _SLICE_DATA[1:2], \
            'slice key should apply slice to sequence'
        assert traverse_obj(_SLICE_DATA, slice(1, 4, 2)) == _SLICE_DATA[1:4:2], \
            'slice key should apply slice to sequence'

    def test_traversal_alternatives(self):
        """Multiple path arguments are tried in order until one matches."""
        assert traverse_obj(_TEST_DATA, 'fail', 'str') == 'str', \
            'multiple `paths` should be treated as alternative paths'
        assert traverse_obj(_TEST_DATA, 'str', 100) == 'str', \
            'alternatives should exit early'
        assert traverse_obj(_TEST_DATA, 'fail', 'fail') is None, \
            'alternatives should return `default` if exhausted'
        assert traverse_obj(_TEST_DATA, (..., 'fail'), 100) == 100, \
            'alternatives should track their own branching return'
        assert traverse_obj(_TEST_DATA, ('dict', ...), ('data', ...)) == list(_TEST_DATA['data']), \
            'alternatives on empty objects should search further'

    def test_traversal_branching_nesting(self):
        """Nested tuples/lists inside a path create branches."""
        assert traverse_obj(_TEST_DATA, ('urls', (3, 0), 'url')) == ['https://www.example.com/0'], \
            'tuple as key should be treated as branches'
        assert traverse_obj(_TEST_DATA, ('urls', [3, 0], 'url')) == ['https://www.example.com/0'], \
            'list as key should be treated as branches'
        assert traverse_obj(_TEST_DATA, ('urls', ((1, 'fail'), (0, 'url')))) == ['https://www.example.com/0'], \
            'double nesting in path should be treated as paths'
        assert traverse_obj(['0', [1, 2]], [(0, 1), 0]) == [1], \
            'do not fail early on branching'
        expected = ['https://www.example.com/0', 'https://www.example.com/1']
        assert traverse_obj(_TEST_DATA, ('urls', ((0, ('fail', 'url')), (1, 'url')))) == expected, \
            'triple nesting in path should be treated as branches'
        assert traverse_obj(_TEST_DATA, ('urls', ('fail', (..., 'url')))) == expected, \
            'ellipsis as branch path start gets flattened'

    def test_traversal_dict(self):
        """Dict paths build result dicts, pruning failed/empty keys."""
        assert traverse_obj(_TEST_DATA, {0: 100, 1: 1.2}) == {0: 100, 1: 1.2}, \
            'dict key should result in a dict with the same keys'
        expected = {0: 'https://www.example.com/0'}
        assert traverse_obj(_TEST_DATA, {0: ('urls', 0, 'url')}) == expected, \
            'dict key should allow paths'
        expected = {0: ['https://www.example.com/0']}
        assert traverse_obj(_TEST_DATA, {0: ('urls', (3, 0), 'url')}) == expected, \
            'tuple in dict path should be treated as branches'
        assert traverse_obj(_TEST_DATA, {0: ('urls', ((1, 'fail'), (0, 'url')))}) == expected, \
            'double nesting in dict path should be treated as paths'
        expected = {0: ['https://www.example.com/1', 'https://www.example.com/0']}
        assert traverse_obj(_TEST_DATA, {0: ('urls', ((1, ('fail', 'url')), (0, 'url')))}) == expected, \
            'triple nesting in dict path should be treated as branches'
        assert traverse_obj(_TEST_DATA, {0: 'fail'}) == {}, \
            'remove `None` values when top level dict key fails'
        assert traverse_obj(_TEST_DATA, {0: 'fail'}, default=...) == {0: ...}, \
            'use `default` if key fails and `default`'
        assert traverse_obj(_TEST_DATA, {0: 'dict'}) == {}, \
            'remove empty values when dict key'
        assert traverse_obj(_TEST_DATA, {0: 'dict'}, default=...) == {0: ...}, \
            'use `default` when dict key and `default`'
        assert traverse_obj(_TEST_DATA, {0: {0: 'fail'}}) == {}, \
            'remove empty values when nested dict key fails'
        assert traverse_obj(None, {0: 'fail'}) == {}, \
            'default to dict if pruned'
        assert traverse_obj(None, {0: 'fail'}, default=...) == {0: ...}, \
            'default to dict if pruned and default is given'
        assert traverse_obj(_TEST_DATA, {0: {0: 'fail'}}, default=...) == {0: {0: ...}}, \
            'use nested `default` when nested dict key fails and `default`'
        assert traverse_obj(_TEST_DATA, {0: ('dict', ...)}) == {}, \
            'remove key if branch in dict key not successful'

    def test_traversal_default(self):
        """`default` semantics for failed and branched lookups."""
        _DEFAULT_DATA = {'None': None, 'int': 0, 'list': []}

        assert traverse_obj(_DEFAULT_DATA, 'fail') is None, \
            'default value should be `None`'
        assert traverse_obj(_DEFAULT_DATA, 'fail', 'fail', default=...) == ..., \
            'chained fails should result in default'
        assert traverse_obj(_DEFAULT_DATA, 'None', 'int') == 0, \
            'should not short circuit on `None`'
        assert traverse_obj(_DEFAULT_DATA, 'fail', default=1) == 1, \
            'invalid dict key should result in `default`'
        assert traverse_obj(_DEFAULT_DATA, 'None', default=1) == 1, \
            '`None` is a deliberate sentinel and should become `default`'
        assert traverse_obj(_DEFAULT_DATA, ('list', 10)) is None, \
            '`IndexError` should result in `default`'
        assert traverse_obj(_DEFAULT_DATA, (..., 'fail'), default=1) == 1, \
            'if branched but not successful return `default` if defined, not `[]`'
        assert traverse_obj(_DEFAULT_DATA, (..., 'fail'), default=None) is None, \
            'if branched but not successful return `default` even if `default` is `None`'
        assert traverse_obj(_DEFAULT_DATA, (..., 'fail')) == [], \
            'if branched but not successful return `[]`, not `default`'
        assert traverse_obj(_DEFAULT_DATA, ('list', ...)) == [], \
            'if branched but object is empty return `[]`, not `default`'
        assert traverse_obj(None, ...) == [], \
            'if branched but object is `None` return `[]`, not `default`'
        assert traverse_obj({0: None}, (0, ...)) == [], \
            'if branched but state is `None` return `[]`, not `default`'

    @pytest.mark.parametrize('path', [
        ('fail', ...),
        (..., 'fail'),
        100 * ('fail',) + (...,),
        (...,) + 100 * ('fail',),
    ])
    def test_traversal_branching(self, path):
        """Branching paths interact correctly with alternatives and `default`."""
        assert traverse_obj({}, path) == [], \
            'if branched but state is `None`, return `[]` (not `default`)'
        assert traverse_obj({}, 'fail', path) == [], \
            'if branching in last alternative and previous did not match, return `[]` (not `default`)'
        assert traverse_obj({0: 'x'}, 0, path) == 'x', \
            'if branching in last alternative and previous did match, return single value'
        assert traverse_obj({0: 'x'}, path, 0) == 'x', \
            'if branching in first alternative and non-branching path does match, return single value'
        assert traverse_obj({}, path, 'fail') is None, \
            'if branching in first alternative and non-branching path does not match, return `default`'

    def test_traversal_expected_type(self):
        """`expected_type` filters or transforms final values."""
        _EXPECTED_TYPE_DATA = {'str': 'str', 'int': 0}

        assert traverse_obj(_EXPECTED_TYPE_DATA, 'str', expected_type=str) == 'str', \
            'accept matching `expected_type` type'
        assert traverse_obj(_EXPECTED_TYPE_DATA, 'str', expected_type=int) is None, \
            'reject non matching `expected_type` type'
        assert traverse_obj(_EXPECTED_TYPE_DATA, 'int', expected_type=lambda x: str(x)) == '0', \
            'transform type using type function'
        assert traverse_obj(_EXPECTED_TYPE_DATA, 'str', expected_type=lambda _: 1 / 0) is None, \
            'wrap expected_type function in try_call'
        assert traverse_obj(_EXPECTED_TYPE_DATA, ..., expected_type=str) == ['str'], \
            'eliminate items that expected_type fails on'
        assert traverse_obj(_TEST_DATA, {0: 100, 1: 1.2}, expected_type=int) == {0: 100}, \
            'type as expected_type should filter dict values'
        assert traverse_obj(_TEST_DATA, {0: 100, 1: 1.2, 2: 'None'}, expected_type=str_or_none) == {0: '100', 1: '1.2'}, \
            'function as expected_type should transform dict values'
        assert traverse_obj(_TEST_DATA, ({0: 1.2}, 0, {int_or_none}), expected_type=int) == 1, \
            'expected_type should not filter non final dict values'
        assert traverse_obj(_TEST_DATA, {0: {0: 100, 1: 'str'}}, expected_type=int) == {0: {0: 100}}, \
            'expected_type should transform deep dict values'
        assert traverse_obj(_TEST_DATA, [({0: '...'}, {0: '...'})], expected_type=type(...)) == [{0: ...}, {0: ...}], \
            'expected_type should transform branched dict values'
        assert traverse_obj({1: {3: 4}}, [(1, 2), 3], expected_type=int) == [4], \
            'expected_type regression for type matching in tuple branching'
        assert traverse_obj(_TEST_DATA, ['data', ...], expected_type=int) == [], \
            'expected_type regression for type matching in dict result'

    def test_traversal_get_all(self):
        """`get_all=False` returns only the first branched match."""
        _GET_ALL_DATA = {'key': [0, 1, 2]}

        assert traverse_obj(_GET_ALL_DATA, ('key', ...), get_all=False) == 0, \
            'if not `get_all`, return only first matching value'
        assert traverse_obj(_GET_ALL_DATA, ..., get_all=False) == [0, 1, 2], \
            'do not overflatten if not `get_all`'

    def test_traversal_casesense(self):
        """`casesense=False` allows case-insensitive dict key lookups."""
        _CASESENSE_DATA = {
            'KeY': 'value0',
            0: {
                'KeY': 'value1',
                0: {'KeY': 'value2'},
            },
        }

        assert traverse_obj(_CASESENSE_DATA, 'key') is None, \
            'dict keys should be case sensitive unless `casesense`'
        assert traverse_obj(_CASESENSE_DATA, 'keY', casesense=False) == 'value0', \
            'allow non matching key case if `casesense`'
        assert traverse_obj(_CASESENSE_DATA, [0, ('keY',)], casesense=False) == ['value1'], \
            'allow non matching key case in branch if `casesense`'
        assert traverse_obj(_CASESENSE_DATA, [0, ([0, 'keY'],)], casesense=False) == ['value2'], \
            'allow non matching key case in branch path if `casesense`'

    def test_traversal_traverse_string(self):
        """`traverse_string=True` descends into string values."""
        _TRAVERSE_STRING_DATA = {'str': 'str', 1.2: 1.2}

        assert traverse_obj(_TRAVERSE_STRING_DATA, ('str', 0)) is None, \
            'do not traverse into string if not `traverse_string`'
        assert traverse_obj(_TRAVERSE_STRING_DATA, ('str', 0), traverse_string=True) == 's', \
            'traverse into string if `traverse_string`'
        assert traverse_obj(_TRAVERSE_STRING_DATA, (1.2, 1), traverse_string=True) == '.', \
            'traverse into converted data if `traverse_string`'
        assert traverse_obj(_TRAVERSE_STRING_DATA, ('str', ...), traverse_string=True) == 'str', \
            '`...` should result in string (same value) if `traverse_string`'
        assert traverse_obj(_TRAVERSE_STRING_DATA, ('str', slice(0, None, 2)), traverse_string=True) == 'sr', \
            '`slice` should result in string if `traverse_string`'
        assert traverse_obj(_TRAVERSE_STRING_DATA, ('str', lambda i, v: i or v == 's'), traverse_string=True) == 'str', \
            'function should result in string if `traverse_string`'
        assert traverse_obj(_TRAVERSE_STRING_DATA, ('str', (0, 2)), traverse_string=True) == ['s', 'r'], \
            'branching should result in list if `traverse_string`'
        assert traverse_obj({}, (0, ...), traverse_string=True) == [], \
            'branching should result in list if `traverse_string`'
        assert traverse_obj({}, (0, lambda x, y: True), traverse_string=True) == [], \
            'branching should result in list if `traverse_string`'
        assert traverse_obj({}, (0, slice(1)), traverse_string=True) == [], \
            'branching should result in list if `traverse_string`'

    def test_traversal_re(self):
        """Traversal into `re.Match` objects by group number and name."""
        mobj = re.fullmatch(r'0(12)(?P<group>3)(4)?', '0123')
        assert traverse_obj(mobj, ...) == [x for x in mobj.groups() if x is not None], \
            '`...` on a `re.Match` should give its `groups()`'
        assert traverse_obj(mobj, lambda k, _: k in (0, 2)) == ['0123', '3'], \
            'function on a `re.Match` should give groupno, value starting at 0'
        assert traverse_obj(mobj, 'group') == '3', \
            'str key on a `re.Match` should give group with that name'
        assert traverse_obj(mobj, 2) == '3', \
            'int key on a `re.Match` should give group with that name'
        assert traverse_obj(mobj, 'gRoUp', casesense=False) == '3', \
            'str key on a `re.Match` should respect casesense'
        assert traverse_obj(mobj, 'fail') is None, \
            'failing str key on a `re.Match` should return `default`'
        assert traverse_obj(mobj, 'gRoUpS', casesense=False) is None, \
            'failing str key on a `re.Match` should return `default`'
        assert traverse_obj(mobj, 8) is None, \
            'failing int key on a `re.Match` should return `default`'
        assert traverse_obj(mobj, lambda k, _: k in (0, 'group')) == ['0123', '3'], \
            'function on a `re.Match` should give group name as well'

    def test_traversal_xml_etree(self):
        """Traversal into ElementTree: children, xpath, `@attr`, `text()`."""
        etree = xml.etree.ElementTree.fromstring('''<?xml version="1.0"?>
        <data>
            <country name="Liechtenstein">
                <rank>1</rank>
                <year>2008</year>
                <gdppc>141100</gdppc>
                <neighbor name="Austria" direction="E"/>
                <neighbor name="Switzerland" direction="W"/>
            </country>
            <country name="Singapore">
                <rank>4</rank>
                <year>2011</year>
                <gdppc>59900</gdppc>
                <neighbor name="Malaysia" direction="N"/>
            </country>
            <country name="Panama">
                <rank>68</rank>
                <year>2011</year>
                <gdppc>13600</gdppc>
                <neighbor name="Costa Rica" direction="W"/>
                <neighbor name="Colombia" direction="E"/>
            </country>
        </data>''')
        assert traverse_obj(etree, '') == etree, \
            'empty str key should return the element itself'
        assert traverse_obj(etree, 'country') == list(etree), \
            'str key should lead all children with that tag name'
        assert traverse_obj(etree, ...) == list(etree), \
            '`...` as key should return all children'
        assert traverse_obj(etree, lambda _, x: x[0].text == '4') == [etree[1]], \
            'function as key should get element as value'
        assert traverse_obj(etree, lambda i, _: i == 1) == [etree[1]], \
            'function as key should get index as key'
        assert traverse_obj(etree, 0) == etree[0], \
            'int key should return the nth child'
        expected = ['Austria', 'Switzerland', 'Malaysia', 'Costa Rica', 'Colombia']
        assert traverse_obj(etree, './/neighbor/@name') == expected, \
            '`@<attribute>` at end of path should give that attribute'
        assert traverse_obj(etree, '//neighbor/@fail') == [None, None, None, None, None], \
            '`@<nonexistent>` at end of path should give `None`'
        assert traverse_obj(etree, ('//neighbor/@', 2)) == {'name': 'Malaysia', 'direction': 'N'}, \
            '`@` should give the full attribute dict'
        assert traverse_obj(etree, '//year/text()') == ['2008', '2011', '2011'], \
            '`text()` at end of path should give the inner text'
        assert traverse_obj(etree, '//*[@direction]/@direction') == ['E', 'W', 'N', 'W', 'E'], \
            'full Python xpath features should be supported'
        assert traverse_obj(etree, (0, '@name')) == 'Liechtenstein', \
            'special transformations should act on current element'
        assert traverse_obj(etree, ('country', 0, ..., 'text()', {int_or_none})) == [1, 2008, 141100], \
            'special transformations should act on current element'

    def test_traversal_unbranching(self):
        """`all`/`any` collapse branched results back to a single state."""
        assert traverse_obj(_TEST_DATA, [(100, 1.2), all]) == [100, 1.2], \
            '`all` should give all results as list'
        assert traverse_obj(_TEST_DATA, [(100, 1.2), any]) == 100, \
            '`any` should give the first result'
        assert traverse_obj(_TEST_DATA, [100, all]) == [100], \
            '`all` should give list if non branching'
        assert traverse_obj(_TEST_DATA, [100, any]) == 100, \
            '`any` should give single item if non branching'
        assert traverse_obj(_TEST_DATA, [('dict', 'None', 100), all]) == [100], \
            '`all` should filter `None` and empty dict'
        assert traverse_obj(_TEST_DATA, [('dict', 'None', 100), any]) == 100, \
            '`any` should filter `None` and empty dict'
        assert traverse_obj(_TEST_DATA, [{
            'all': [('dict', 'None', 100, 1.2), all],
            'any': [('dict', 'None', 100, 1.2), any],
        }]) == {'all': [100, 1.2], 'any': 100}, \
            '`all`/`any` should apply to each dict path separately'
        assert traverse_obj(_TEST_DATA, [{
            'all': [('dict', 'None', 100, 1.2), all],
            'any': [('dict', 'None', 100, 1.2), any],
        }], get_all=False) == {'all': [100, 1.2], 'any': 100}, \
            '`all`/`any` should apply to dict regardless of `get_all`'
        assert traverse_obj(_TEST_DATA, [('dict', 'None', 100, 1.2), all, {float}]) is None, \
            '`all` should reset branching status'
        assert traverse_obj(_TEST_DATA, [('dict', 'None', 100, 1.2), any, {float}]) is None, \
            '`any` should reset branching status'
        assert traverse_obj(_TEST_DATA, [('dict', 'None', 100, 1.2), all, ..., {float}]) == [1.2], \
            '`all` should allow further branching'
        assert traverse_obj(_TEST_DATA, [('dict', 'None', 'urls', 'data'), any, ..., 'index']) == [0, 1], \
            '`any` should allow further branching'

    def test_traversal_morsel(self):
        """Traversal into `http.cookies.Morsel` attributes and key/value."""
        values = {
            'expires': 'a',
            'path': 'b',
            'comment': 'c',
            'domain': 'd',
            'max-age': 'e',
            'secure': 'f',
            'httponly': 'g',
            'version': 'h',
            'samesite': 'i',
        }
        morsel = http.cookies.Morsel()
        morsel.set('item_key', 'item_value', 'coded_value')
        morsel.update(values)
        values['key'] = 'item_key'
        values['value'] = 'item_value'

        for key, value in values.items():
            assert traverse_obj(morsel, key) == value, \
                'Morsel should provide access to all values'
        assert traverse_obj(morsel, ...) == list(values.values()), \
            '`...` should yield all values'
        assert traverse_obj(morsel, lambda k, v: True) == list(values.values()), \
            'function key should yield all values'
        assert traverse_obj(morsel, [(None,), any]) == morsel, \
            'Morsel should not be implicitly changed to dict on usage'
|
||||||
|
|
||||||
|
|
||||||
|
class TestDictGet:
    """Tests for the `dict_get` convenience lookup helper."""

    def test_dict_get(self):
        FALSE_VALUES = {
            'none': None,
            'false': False,
            'zero': 0,
            'empty_string': '',
            'empty_list': [],
        }
        data = {**FALSE_VALUES, 'a': 42}

        # Single-key lookups, with and without an explicit default
        assert dict_get(data, 'a') == 42
        assert dict_get(data, 'b') is None
        assert dict_get(data, 'b', 42) == 42

        # Tuple of keys: the first key with a truthy value wins
        assert dict_get(data, ('a',)) == 42
        assert dict_get(data, ('b', 'a')) == 42
        assert dict_get(data, ('b', 'c', 'a', 'd')) == 42
        assert dict_get(data, ('b', 'c')) is None
        assert dict_get(data, ('b', 'c'), 42) == 42

        # Falsy values are skipped unless skip_false_values is disabled
        for key, false_value in FALSE_VALUES.items():
            assert dict_get(data, ('b', 'c', key)) is None
            assert dict_get(data, ('b', 'c', key), skip_false_values=False) == false_value
|
@ -0,0 +1,228 @@
|
|||||||
|
#!/usr/bin/env python3
|
||||||
|
|
||||||
|
# Allow direct execution
|
||||||
|
import os
|
||||||
|
import sys
|
||||||
|
import unittest
|
||||||
|
|
||||||
|
sys.path.insert(0, os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
|
||||||
|
|
||||||
|
|
||||||
|
from test.helper import FakeYDL, report_warning
|
||||||
|
from yt_dlp.update import UpdateInfo, Updater
|
||||||
|
|
||||||
|
|
||||||
|
# XXX: Keep in sync with yt_dlp.update.UPDATE_SOURCES
|
||||||
|
TEST_UPDATE_SOURCES = {
|
||||||
|
'stable': 'yt-dlp/yt-dlp',
|
||||||
|
'nightly': 'yt-dlp/yt-dlp-nightly-builds',
|
||||||
|
'master': 'yt-dlp/yt-dlp-master-builds',
|
||||||
|
}
|
||||||
|
|
||||||
|
TEST_API_DATA = {
|
||||||
|
'yt-dlp/yt-dlp/latest': {
|
||||||
|
'tag_name': '2023.12.31',
|
||||||
|
'target_commitish': 'bbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbb',
|
||||||
|
'name': 'yt-dlp 2023.12.31',
|
||||||
|
'body': 'BODY',
|
||||||
|
},
|
||||||
|
'yt-dlp/yt-dlp-nightly-builds/latest': {
|
||||||
|
'tag_name': '2023.12.31.123456',
|
||||||
|
'target_commitish': 'master',
|
||||||
|
'name': 'yt-dlp nightly 2023.12.31.123456',
|
||||||
|
'body': 'Generated from: https://github.com/yt-dlp/yt-dlp/commit/cccccccccccccccccccccccccccccccccccccccc',
|
||||||
|
},
|
||||||
|
'yt-dlp/yt-dlp-master-builds/latest': {
|
||||||
|
'tag_name': '2023.12.31.987654',
|
||||||
|
'target_commitish': 'master',
|
||||||
|
'name': 'yt-dlp master 2023.12.31.987654',
|
||||||
|
'body': 'Generated from: https://github.com/yt-dlp/yt-dlp/commit/dddddddddddddddddddddddddddddddddddddddd',
|
||||||
|
},
|
||||||
|
'yt-dlp/yt-dlp/tags/testing': {
|
||||||
|
'tag_name': 'testing',
|
||||||
|
'target_commitish': '9999999999999999999999999999999999999999',
|
||||||
|
'name': 'testing',
|
||||||
|
'body': 'BODY',
|
||||||
|
},
|
||||||
|
'fork/yt-dlp/latest': {
|
||||||
|
'tag_name': '2050.12.31',
|
||||||
|
'target_commitish': 'eeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeee',
|
||||||
|
'name': '2050.12.31',
|
||||||
|
'body': 'BODY',
|
||||||
|
},
|
||||||
|
'fork/yt-dlp/tags/pr0000': {
|
||||||
|
'tag_name': 'pr0000',
|
||||||
|
'target_commitish': 'ffffffffffffffffffffffffffffffffffffffff',
|
||||||
|
'name': 'pr1234 2023.11.11.000000',
|
||||||
|
'body': 'BODY',
|
||||||
|
},
|
||||||
|
'fork/yt-dlp/tags/pr1234': {
|
||||||
|
'tag_name': 'pr1234',
|
||||||
|
'target_commitish': '0000000000000000000000000000000000000000',
|
||||||
|
'name': 'pr1234 2023.12.31.555555',
|
||||||
|
'body': 'BODY',
|
||||||
|
},
|
||||||
|
'fork/yt-dlp/tags/pr9999': {
|
||||||
|
'tag_name': 'pr9999',
|
||||||
|
'target_commitish': '1111111111111111111111111111111111111111',
|
||||||
|
'name': 'pr9999',
|
||||||
|
'body': 'BODY',
|
||||||
|
},
|
||||||
|
'fork/yt-dlp-satellite/tags/pr987': {
|
||||||
|
'tag_name': 'pr987',
|
||||||
|
'target_commitish': 'master',
|
||||||
|
'name': 'pr987',
|
||||||
|
'body': 'Generated from: https://github.com/yt-dlp/yt-dlp/commit/2222222222222222222222222222222222222222',
|
||||||
|
},
|
||||||
|
}
|
||||||
|
|
||||||
|
TEST_LOCKFILE_COMMENT = '# This file is used for regulating self-update'
|
||||||
|
|
||||||
|
TEST_LOCKFILE_V1 = rf'''{TEST_LOCKFILE_COMMENT}
|
||||||
|
lock 2022.08.18.36 .+ Python 3\.6
|
||||||
|
lock 2023.11.16 (?!win_x86_exe).+ Python 3\.7
|
||||||
|
lock 2023.11.16 win_x86_exe .+ Windows-(?:Vista|2008Server)
|
||||||
|
'''
|
||||||
|
|
||||||
|
TEST_LOCKFILE_V2_TMPL = r'''%s
|
||||||
|
lockV2 yt-dlp/yt-dlp 2022.08.18.36 .+ Python 3\.6
|
||||||
|
lockV2 yt-dlp/yt-dlp 2023.11.16 (?!win_x86_exe).+ Python 3\.7
|
||||||
|
lockV2 yt-dlp/yt-dlp 2023.11.16 win_x86_exe .+ Windows-(?:Vista|2008Server)
|
||||||
|
lockV2 yt-dlp/yt-dlp-nightly-builds 2023.11.15.232826 (?!win_x86_exe).+ Python 3\.7
|
||||||
|
lockV2 yt-dlp/yt-dlp-nightly-builds 2023.11.15.232826 win_x86_exe .+ Windows-(?:Vista|2008Server)
|
||||||
|
lockV2 yt-dlp/yt-dlp-master-builds 2023.11.15.232812 (?!win_x86_exe).+ Python 3\.7
|
||||||
|
lockV2 yt-dlp/yt-dlp-master-builds 2023.11.15.232812 win_x86_exe .+ Windows-(?:Vista|2008Server)
|
||||||
|
'''
|
||||||
|
|
||||||
|
TEST_LOCKFILE_V2 = TEST_LOCKFILE_V2_TMPL % TEST_LOCKFILE_COMMENT
|
||||||
|
|
||||||
|
TEST_LOCKFILE_ACTUAL = TEST_LOCKFILE_V2_TMPL % TEST_LOCKFILE_V1.rstrip('\n')
|
||||||
|
|
||||||
|
TEST_LOCKFILE_FORK = rf'''{TEST_LOCKFILE_ACTUAL}# Test if a fork blocks updates to non-numeric tags
|
||||||
|
lockV2 fork/yt-dlp pr0000 .+ Python 3.6
|
||||||
|
lockV2 fork/yt-dlp pr1234 (?!win_x86_exe).+ Python 3\.7
|
||||||
|
lockV2 fork/yt-dlp pr1234 win_x86_exe .+ Windows-(?:Vista|2008Server)
|
||||||
|
lockV2 fork/yt-dlp pr9999 .+ Python 3.11
|
||||||
|
'''
|
||||||
|
|
||||||
|
|
||||||
|
class FakeUpdater(Updater):
|
||||||
|
current_version = '2022.01.01'
|
||||||
|
current_commit = 'aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa'
|
||||||
|
|
||||||
|
_channel = 'stable'
|
||||||
|
_origin = 'yt-dlp/yt-dlp'
|
||||||
|
_update_sources = TEST_UPDATE_SOURCES
|
||||||
|
|
||||||
|
def _download_update_spec(self, *args, **kwargs):
|
||||||
|
return TEST_LOCKFILE_ACTUAL
|
||||||
|
|
||||||
|
def _call_api(self, tag):
|
||||||
|
tag = f'tags/{tag}' if tag != 'latest' else tag
|
||||||
|
return TEST_API_DATA[f'{self.requested_repo}/{tag}']
|
||||||
|
|
||||||
|
def _report_error(self, msg, *args, **kwargs):
|
||||||
|
report_warning(msg)
|
||||||
|
|
||||||
|
|
||||||
|
class TestUpdate(unittest.TestCase):
|
||||||
|
maxDiff = None
|
||||||
|
|
||||||
|
def test_update_spec(self):
|
||||||
|
ydl = FakeYDL()
|
||||||
|
updater = FakeUpdater(ydl, 'stable')
|
||||||
|
|
||||||
|
def test(lockfile, identifier, input_tag, expect_tag, exact=False, repo='yt-dlp/yt-dlp'):
|
||||||
|
updater._identifier = identifier
|
||||||
|
updater._exact = exact
|
||||||
|
updater.requested_repo = repo
|
||||||
|
result = updater._process_update_spec(lockfile, input_tag)
|
||||||
|
self.assertEqual(
|
||||||
|
result, expect_tag,
|
||||||
|
f'{identifier!r} requesting {repo}@{input_tag} (exact={exact}) '
|
||||||
|
f'returned {result!r} instead of {expect_tag!r}')
|
||||||
|
|
||||||
|
for lockfile in (TEST_LOCKFILE_V1, TEST_LOCKFILE_V2, TEST_LOCKFILE_ACTUAL, TEST_LOCKFILE_FORK):
|
||||||
|
# Normal operation
|
||||||
|
test(lockfile, 'zip Python 3.12.0', '2023.12.31', '2023.12.31')
|
||||||
|
test(lockfile, 'zip stable Python 3.12.0', '2023.12.31', '2023.12.31', exact=True)
|
||||||
|
# Python 3.6 --update should update only to its lock
|
||||||
|
test(lockfile, 'zip Python 3.6.0', '2023.11.16', '2022.08.18.36')
|
||||||
|
# --update-to an exact version later than the lock should return None
|
||||||
|
test(lockfile, 'zip stable Python 3.6.0', '2023.11.16', None, exact=True)
|
||||||
|
# Python 3.7 should be able to update to its lock
|
||||||
|
test(lockfile, 'zip Python 3.7.0', '2023.11.16', '2023.11.16')
|
||||||
|
test(lockfile, 'zip stable Python 3.7.1', '2023.11.16', '2023.11.16', exact=True)
|
||||||
|
# Non-win_x86_exe builds on py3.7 must be locked
|
||||||
|
test(lockfile, 'zip Python 3.7.1', '2023.12.31', '2023.11.16')
|
||||||
|
test(lockfile, 'zip stable Python 3.7.1', '2023.12.31', None, exact=True)
|
||||||
|
test( # Windows Vista w/ win_x86_exe must be locked
|
||||||
|
lockfile, 'win_x86_exe stable Python 3.7.9 (CPython x86 32bit) - Windows-Vista-6.0.6003-SP2',
|
||||||
|
'2023.12.31', '2023.11.16')
|
||||||
|
test( # Windows 2008Server w/ win_x86_exe must be locked
|
||||||
|
lockfile, 'win_x86_exe Python 3.7.9 (CPython x86 32bit) - Windows-2008Server',
|
||||||
|
'2023.12.31', None, exact=True)
|
||||||
|
test( # Windows 7 w/ win_x86_exe py3.7 build should be able to update beyond lock
|
||||||
|
lockfile, 'win_x86_exe stable Python 3.7.9 (CPython x86 32bit) - Windows-7-6.1.7601-SP1',
|
||||||
|
'2023.12.31', '2023.12.31')
|
||||||
|
test( # Windows 8.1 w/ '2008Server' in platform string should be able to update beyond lock
|
||||||
|
lockfile, 'win_x86_exe Python 3.7.9 (CPython x86 32bit) - Windows-post2008Server-6.2.9200',
|
||||||
|
'2023.12.31', '2023.12.31', exact=True)
|
||||||
|
|
||||||
|
# Forks can block updates to non-numeric tags rather than lock
|
||||||
|
test(TEST_LOCKFILE_FORK, 'zip Python 3.6.3', 'pr0000', None, repo='fork/yt-dlp')
|
||||||
|
test(TEST_LOCKFILE_FORK, 'zip stable Python 3.7.4', 'pr0000', 'pr0000', repo='fork/yt-dlp')
|
||||||
|
test(TEST_LOCKFILE_FORK, 'zip stable Python 3.7.4', 'pr1234', None, repo='fork/yt-dlp')
|
||||||
|
test(TEST_LOCKFILE_FORK, 'zip Python 3.8.1', 'pr1234', 'pr1234', repo='fork/yt-dlp', exact=True)
|
||||||
|
test(
|
||||||
|
TEST_LOCKFILE_FORK, 'win_x86_exe stable Python 3.7.9 (CPython x86 32bit) - Windows-Vista-6.0.6003-SP2',
|
||||||
|
'pr1234', None, repo='fork/yt-dlp')
|
||||||
|
test(
|
||||||
|
TEST_LOCKFILE_FORK, 'win_x86_exe stable Python 3.7.9 (CPython x86 32bit) - Windows-7-6.1.7601-SP1',
|
||||||
|
'2023.12.31', '2023.12.31', repo='fork/yt-dlp')
|
||||||
|
test(TEST_LOCKFILE_FORK, 'zip Python 3.11.2', 'pr9999', None, repo='fork/yt-dlp', exact=True)
|
||||||
|
test(TEST_LOCKFILE_FORK, 'zip stable Python 3.12.0', 'pr9999', 'pr9999', repo='fork/yt-dlp')
|
||||||
|
|
||||||
|
def test_query_update(self):
|
||||||
|
ydl = FakeYDL()
|
||||||
|
|
||||||
|
def test(target, expected, current_version=None, current_commit=None, identifier=None):
|
||||||
|
updater = FakeUpdater(ydl, target)
|
||||||
|
if current_version:
|
||||||
|
updater.current_version = current_version
|
||||||
|
if current_commit:
|
||||||
|
updater.current_commit = current_commit
|
||||||
|
updater._identifier = identifier or 'zip'
|
||||||
|
update_info = updater.query_update(_output=True)
|
||||||
|
self.assertDictEqual(
|
||||||
|
update_info.__dict__ if update_info else {}, expected.__dict__ if expected else {})
|
||||||
|
|
||||||
|
test('yt-dlp/yt-dlp@latest', UpdateInfo(
|
||||||
|
'2023.12.31', version='2023.12.31', requested_version='2023.12.31', commit='b' * 40))
|
||||||
|
test('yt-dlp/yt-dlp-nightly-builds@latest', UpdateInfo(
|
||||||
|
'2023.12.31.123456', version='2023.12.31.123456', requested_version='2023.12.31.123456', commit='c' * 40))
|
||||||
|
test('yt-dlp/yt-dlp-master-builds@latest', UpdateInfo(
|
||||||
|
'2023.12.31.987654', version='2023.12.31.987654', requested_version='2023.12.31.987654', commit='d' * 40))
|
||||||
|
test('fork/yt-dlp@latest', UpdateInfo(
|
||||||
|
'2050.12.31', version='2050.12.31', requested_version='2050.12.31', commit='e' * 40))
|
||||||
|
test('fork/yt-dlp@pr0000', UpdateInfo(
|
||||||
|
'pr0000', version='2023.11.11.000000', requested_version='2023.11.11.000000', commit='f' * 40))
|
||||||
|
test('fork/yt-dlp@pr1234', UpdateInfo(
|
||||||
|
'pr1234', version='2023.12.31.555555', requested_version='2023.12.31.555555', commit='0' * 40))
|
||||||
|
test('fork/yt-dlp@pr9999', UpdateInfo(
|
||||||
|
'pr9999', version=None, requested_version=None, commit='1' * 40))
|
||||||
|
test('fork/yt-dlp-satellite@pr987', UpdateInfo(
|
||||||
|
'pr987', version=None, requested_version=None, commit='2' * 40))
|
||||||
|
test('yt-dlp/yt-dlp', None, current_version='2024.01.01')
|
||||||
|
test('stable', UpdateInfo(
|
||||||
|
'2023.12.31', version='2023.12.31', requested_version='2023.12.31', commit='b' * 40))
|
||||||
|
test('nightly', UpdateInfo(
|
||||||
|
'2023.12.31.123456', version='2023.12.31.123456', requested_version='2023.12.31.123456', commit='c' * 40))
|
||||||
|
test('master', UpdateInfo(
|
||||||
|
'2023.12.31.987654', version='2023.12.31.987654', requested_version='2023.12.31.987654', commit='d' * 40))
|
||||||
|
test('testing', None, current_commit='9' * 40)
|
||||||
|
test('testing', UpdateInfo('testing', commit='9' * 40))
|
||||||
|
|
||||||
|
|
||||||
|
if __name__ == '__main__':
|
||||||
|
unittest.main()
|
@ -1,30 +0,0 @@
|
|||||||
#!/usr/bin/env python3
|
|
||||||
|
|
||||||
# Allow direct execution
|
|
||||||
import os
|
|
||||||
import sys
|
|
||||||
import unittest
|
|
||||||
|
|
||||||
sys.path.insert(0, os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
|
|
||||||
|
|
||||||
|
|
||||||
import json
|
|
||||||
|
|
||||||
from yt_dlp.update import rsa_verify
|
|
||||||
|
|
||||||
|
|
||||||
class TestUpdate(unittest.TestCase):
|
|
||||||
def test_rsa_verify(self):
|
|
||||||
UPDATES_RSA_KEY = (0x9d60ee4d8f805312fdb15a62f87b95bd66177b91df176765d13514a0f1754bcd2057295c5b6f1d35daa6742c3ffc9a82d3e118861c207995a8031e151d863c9927e304576bc80692bc8e094896fcf11b66f3e29e04e3a71e9a11558558acea1840aec37fc396fb6b65dc81a1c4144e03bd1c011de62e3f1357b327d08426fe93, 65537)
|
|
||||||
with open(os.path.join(os.path.dirname(os.path.abspath(__file__)), 'versions.json'), 'rb') as f:
|
|
||||||
versions_info = f.read().decode()
|
|
||||||
versions_info = json.loads(versions_info)
|
|
||||||
signature = versions_info['signature']
|
|
||||||
del versions_info['signature']
|
|
||||||
self.assertTrue(rsa_verify(
|
|
||||||
json.dumps(versions_info, sort_keys=True).encode(),
|
|
||||||
signature, UPDATES_RSA_KEY))
|
|
||||||
|
|
||||||
|
|
||||||
if __name__ == '__main__':
|
|
||||||
unittest.main()
|
|
@ -0,0 +1,439 @@
|
|||||||
|
#!/usr/bin/env python3
|
||||||
|
|
||||||
|
# Allow direct execution
|
||||||
|
import os
|
||||||
|
import sys
|
||||||
|
import time
|
||||||
|
|
||||||
|
import pytest
|
||||||
|
|
||||||
|
from test.helper import verify_address_availability
|
||||||
|
from yt_dlp.networking.common import Features, DEFAULT_TIMEOUT
|
||||||
|
|
||||||
|
sys.path.insert(0, os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
|
||||||
|
|
||||||
|
import http.client
|
||||||
|
import http.cookiejar
|
||||||
|
import http.server
|
||||||
|
import json
|
||||||
|
import random
|
||||||
|
import ssl
|
||||||
|
import threading
|
||||||
|
|
||||||
|
from yt_dlp import socks, traverse_obj
|
||||||
|
from yt_dlp.cookies import YoutubeDLCookieJar
|
||||||
|
from yt_dlp.dependencies import websockets
|
||||||
|
from yt_dlp.networking import Request
|
||||||
|
from yt_dlp.networking.exceptions import (
|
||||||
|
CertificateVerifyError,
|
||||||
|
HTTPError,
|
||||||
|
ProxyError,
|
||||||
|
RequestError,
|
||||||
|
SSLError,
|
||||||
|
TransportError,
|
||||||
|
)
|
||||||
|
from yt_dlp.utils.networking import HTTPHeaderDict
|
||||||
|
|
||||||
|
TEST_DIR = os.path.dirname(os.path.abspath(__file__))
|
||||||
|
|
||||||
|
|
||||||
|
def websocket_handler(websocket):
|
||||||
|
for message in websocket:
|
||||||
|
if isinstance(message, bytes):
|
||||||
|
if message == b'bytes':
|
||||||
|
return websocket.send('2')
|
||||||
|
elif isinstance(message, str):
|
||||||
|
if message == 'headers':
|
||||||
|
return websocket.send(json.dumps(dict(websocket.request.headers)))
|
||||||
|
elif message == 'path':
|
||||||
|
return websocket.send(websocket.request.path)
|
||||||
|
elif message == 'source_address':
|
||||||
|
return websocket.send(websocket.remote_address[0])
|
||||||
|
elif message == 'str':
|
||||||
|
return websocket.send('1')
|
||||||
|
return websocket.send(message)
|
||||||
|
|
||||||
|
|
||||||
|
def process_request(self, request):
|
||||||
|
if request.path.startswith('/gen_'):
|
||||||
|
status = http.HTTPStatus(int(request.path[5:]))
|
||||||
|
if 300 <= status.value <= 300:
|
||||||
|
return websockets.http11.Response(
|
||||||
|
status.value, status.phrase, websockets.datastructures.Headers([('Location', '/')]), b'')
|
||||||
|
return self.protocol.reject(status.value, status.phrase)
|
||||||
|
return self.protocol.accept(request)
|
||||||
|
|
||||||
|
|
||||||
|
def create_websocket_server(**ws_kwargs):
|
||||||
|
import websockets.sync.server
|
||||||
|
wsd = websockets.sync.server.serve(
|
||||||
|
websocket_handler, '127.0.0.1', 0,
|
||||||
|
process_request=process_request, open_timeout=2, **ws_kwargs)
|
||||||
|
ws_port = wsd.socket.getsockname()[1]
|
||||||
|
ws_server_thread = threading.Thread(target=wsd.serve_forever)
|
||||||
|
ws_server_thread.daemon = True
|
||||||
|
ws_server_thread.start()
|
||||||
|
return ws_server_thread, ws_port
|
||||||
|
|
||||||
|
|
||||||
|
def create_ws_websocket_server():
|
||||||
|
return create_websocket_server()
|
||||||
|
|
||||||
|
|
||||||
|
def create_wss_websocket_server():
|
||||||
|
certfn = os.path.join(TEST_DIR, 'testcert.pem')
|
||||||
|
sslctx = ssl.SSLContext(ssl.PROTOCOL_TLS_SERVER)
|
||||||
|
sslctx.load_cert_chain(certfn, None)
|
||||||
|
return create_websocket_server(ssl_context=sslctx)
|
||||||
|
|
||||||
|
|
||||||
|
MTLS_CERT_DIR = os.path.join(TEST_DIR, 'testdata', 'certificate')
|
||||||
|
|
||||||
|
|
||||||
|
def create_mtls_wss_websocket_server():
|
||||||
|
certfn = os.path.join(TEST_DIR, 'testcert.pem')
|
||||||
|
cacertfn = os.path.join(MTLS_CERT_DIR, 'ca.crt')
|
||||||
|
|
||||||
|
sslctx = ssl.SSLContext(ssl.PROTOCOL_TLS_SERVER)
|
||||||
|
sslctx.verify_mode = ssl.CERT_REQUIRED
|
||||||
|
sslctx.load_verify_locations(cafile=cacertfn)
|
||||||
|
sslctx.load_cert_chain(certfn, None)
|
||||||
|
|
||||||
|
return create_websocket_server(ssl_context=sslctx)
|
||||||
|
|
||||||
|
|
||||||
|
def ws_validate_and_send(rh, req):
|
||||||
|
rh.validate(req)
|
||||||
|
max_tries = 3
|
||||||
|
for i in range(max_tries):
|
||||||
|
try:
|
||||||
|
return rh.send(req)
|
||||||
|
except TransportError as e:
|
||||||
|
if i < (max_tries - 1) and 'connection closed during handshake' in str(e):
|
||||||
|
# websockets server sometimes hangs on new connections
|
||||||
|
continue
|
||||||
|
raise
|
||||||
|
|
||||||
|
|
||||||
|
@pytest.mark.skipif(not websockets, reason='websockets must be installed to test websocket request handlers')
|
||||||
|
@pytest.mark.parametrize('handler', ['Websockets'], indirect=True)
|
||||||
|
class TestWebsSocketRequestHandlerConformance:
|
||||||
|
@classmethod
|
||||||
|
def setup_class(cls):
|
||||||
|
cls.ws_thread, cls.ws_port = create_ws_websocket_server()
|
||||||
|
cls.ws_base_url = f'ws://127.0.0.1:{cls.ws_port}'
|
||||||
|
|
||||||
|
cls.wss_thread, cls.wss_port = create_wss_websocket_server()
|
||||||
|
cls.wss_base_url = f'wss://127.0.0.1:{cls.wss_port}'
|
||||||
|
|
||||||
|
cls.bad_wss_thread, cls.bad_wss_port = create_websocket_server(ssl_context=ssl.SSLContext(ssl.PROTOCOL_TLS_SERVER))
|
||||||
|
cls.bad_wss_host = f'wss://127.0.0.1:{cls.bad_wss_port}'
|
||||||
|
|
||||||
|
cls.mtls_wss_thread, cls.mtls_wss_port = create_mtls_wss_websocket_server()
|
||||||
|
cls.mtls_wss_base_url = f'wss://127.0.0.1:{cls.mtls_wss_port}'
|
||||||
|
|
||||||
|
def test_basic_websockets(self, handler):
|
||||||
|
with handler() as rh:
|
||||||
|
ws = ws_validate_and_send(rh, Request(self.ws_base_url))
|
||||||
|
assert 'upgrade' in ws.headers
|
||||||
|
assert ws.status == 101
|
||||||
|
ws.send('foo')
|
||||||
|
assert ws.recv() == 'foo'
|
||||||
|
ws.close()
|
||||||
|
|
||||||
|
# https://www.rfc-editor.org/rfc/rfc6455.html#section-5.6
|
||||||
|
@pytest.mark.parametrize('msg,opcode', [('str', 1), (b'bytes', 2)])
|
||||||
|
def test_send_types(self, handler, msg, opcode):
|
||||||
|
with handler() as rh:
|
||||||
|
ws = ws_validate_and_send(rh, Request(self.ws_base_url))
|
||||||
|
ws.send(msg)
|
||||||
|
assert int(ws.recv()) == opcode
|
||||||
|
ws.close()
|
||||||
|
|
||||||
|
def test_verify_cert(self, handler):
|
||||||
|
with handler() as rh:
|
||||||
|
with pytest.raises(CertificateVerifyError):
|
||||||
|
ws_validate_and_send(rh, Request(self.wss_base_url))
|
||||||
|
|
||||||
|
with handler(verify=False) as rh:
|
||||||
|
ws = ws_validate_and_send(rh, Request(self.wss_base_url))
|
||||||
|
assert ws.status == 101
|
||||||
|
ws.close()
|
||||||
|
|
||||||
|
def test_ssl_error(self, handler):
|
||||||
|
with handler(verify=False) as rh:
|
||||||
|
with pytest.raises(SSLError, match=r'ssl(?:v3|/tls) alert handshake failure') as exc_info:
|
||||||
|
ws_validate_and_send(rh, Request(self.bad_wss_host))
|
||||||
|
assert not issubclass(exc_info.type, CertificateVerifyError)
|
||||||
|
|
||||||
|
@pytest.mark.parametrize('path,expected', [
|
||||||
|
# Unicode characters should be encoded with uppercase percent-encoding
|
||||||
|
('/中文', '/%E4%B8%AD%E6%96%87'),
|
||||||
|
# don't normalize existing percent encodings
|
||||||
|
('/%c7%9f', '/%c7%9f'),
|
||||||
|
])
|
||||||
|
def test_percent_encode(self, handler, path, expected):
|
||||||
|
with handler() as rh:
|
||||||
|
ws = ws_validate_and_send(rh, Request(f'{self.ws_base_url}{path}'))
|
||||||
|
ws.send('path')
|
||||||
|
assert ws.recv() == expected
|
||||||
|
assert ws.status == 101
|
||||||
|
ws.close()
|
||||||
|
|
||||||
|
def test_remove_dot_segments(self, handler):
|
||||||
|
with handler() as rh:
|
||||||
|
# This isn't a comprehensive test,
|
||||||
|
# but it should be enough to check whether the handler is removing dot segments
|
||||||
|
ws = ws_validate_and_send(rh, Request(f'{self.ws_base_url}/a/b/./../../test'))
|
||||||
|
assert ws.status == 101
|
||||||
|
ws.send('path')
|
||||||
|
assert ws.recv() == '/test'
|
||||||
|
ws.close()
|
||||||
|
|
||||||
|
# We are restricted to known HTTP status codes in http.HTTPStatus
|
||||||
|
# Redirects are not supported for websockets
|
||||||
|
@pytest.mark.parametrize('status', (200, 204, 301, 302, 303, 400, 500, 511))
|
||||||
|
def test_raise_http_error(self, handler, status):
|
||||||
|
with handler() as rh:
|
||||||
|
with pytest.raises(HTTPError) as exc_info:
|
||||||
|
ws_validate_and_send(rh, Request(f'{self.ws_base_url}/gen_{status}'))
|
||||||
|
assert exc_info.value.status == status
|
||||||
|
|
||||||
|
@pytest.mark.parametrize('params,extensions', [
|
||||||
|
({'timeout': sys.float_info.min}, {}),
|
||||||
|
({}, {'timeout': sys.float_info.min}),
|
||||||
|
])
|
||||||
|
def test_read_timeout(self, handler, params, extensions):
|
||||||
|
with handler(**params) as rh:
|
||||||
|
with pytest.raises(TransportError):
|
||||||
|
ws_validate_and_send(rh, Request(self.ws_base_url, extensions=extensions))
|
||||||
|
|
||||||
|
def test_connect_timeout(self, handler):
|
||||||
|
# nothing should be listening on this port
|
||||||
|
connect_timeout_url = 'ws://10.255.255.255'
|
||||||
|
with handler(timeout=0.01) as rh, pytest.raises(TransportError):
|
||||||
|
now = time.time()
|
||||||
|
ws_validate_and_send(rh, Request(connect_timeout_url))
|
||||||
|
assert time.time() - now < DEFAULT_TIMEOUT
|
||||||
|
|
||||||
|
# Per request timeout, should override handler timeout
|
||||||
|
request = Request(connect_timeout_url, extensions={'timeout': 0.01})
|
||||||
|
with handler() as rh, pytest.raises(TransportError):
|
||||||
|
now = time.time()
|
||||||
|
ws_validate_and_send(rh, request)
|
||||||
|
assert time.time() - now < DEFAULT_TIMEOUT
|
||||||
|
|
||||||
|
def test_cookies(self, handler):
|
||||||
|
cookiejar = YoutubeDLCookieJar()
|
||||||
|
cookiejar.set_cookie(http.cookiejar.Cookie(
|
||||||
|
version=0, name='test', value='ytdlp', port=None, port_specified=False,
|
||||||
|
domain='127.0.0.1', domain_specified=True, domain_initial_dot=False, path='/',
|
||||||
|
path_specified=True, secure=False, expires=None, discard=False, comment=None,
|
||||||
|
comment_url=None, rest={}))
|
||||||
|
|
||||||
|
with handler(cookiejar=cookiejar) as rh:
|
||||||
|
ws = ws_validate_and_send(rh, Request(self.ws_base_url))
|
||||||
|
ws.send('headers')
|
||||||
|
assert json.loads(ws.recv())['cookie'] == 'test=ytdlp'
|
||||||
|
ws.close()
|
||||||
|
|
||||||
|
with handler() as rh:
|
||||||
|
ws = ws_validate_and_send(rh, Request(self.ws_base_url))
|
||||||
|
ws.send('headers')
|
||||||
|
assert 'cookie' not in json.loads(ws.recv())
|
||||||
|
ws.close()
|
||||||
|
|
||||||
|
ws = ws_validate_and_send(rh, Request(self.ws_base_url, extensions={'cookiejar': cookiejar}))
|
||||||
|
ws.send('headers')
|
||||||
|
assert json.loads(ws.recv())['cookie'] == 'test=ytdlp'
|
||||||
|
ws.close()
|
||||||
|
|
||||||
|
def test_source_address(self, handler):
|
||||||
|
source_address = f'127.0.0.{random.randint(5, 255)}'
|
||||||
|
verify_address_availability(source_address)
|
||||||
|
with handler(source_address=source_address) as rh:
|
||||||
|
ws = ws_validate_and_send(rh, Request(self.ws_base_url))
|
||||||
|
ws.send('source_address')
|
||||||
|
assert source_address == ws.recv()
|
||||||
|
ws.close()
|
||||||
|
|
||||||
|
def test_response_url(self, handler):
|
||||||
|
with handler() as rh:
|
||||||
|
url = f'{self.ws_base_url}/something'
|
||||||
|
ws = ws_validate_and_send(rh, Request(url))
|
||||||
|
assert ws.url == url
|
||||||
|
ws.close()
|
||||||
|
|
||||||
|
def test_request_headers(self, handler):
|
||||||
|
with handler(headers=HTTPHeaderDict({'test1': 'test', 'test2': 'test2'})) as rh:
|
||||||
|
# Global Headers
|
||||||
|
ws = ws_validate_and_send(rh, Request(self.ws_base_url))
|
||||||
|
ws.send('headers')
|
||||||
|
headers = HTTPHeaderDict(json.loads(ws.recv()))
|
||||||
|
assert headers['test1'] == 'test'
|
||||||
|
ws.close()
|
||||||
|
|
||||||
|
# Per request headers, merged with global
|
||||||
|
ws = ws_validate_and_send(rh, Request(
|
||||||
|
self.ws_base_url, headers={'test2': 'changed', 'test3': 'test3'}))
|
||||||
|
ws.send('headers')
|
||||||
|
headers = HTTPHeaderDict(json.loads(ws.recv()))
|
||||||
|
assert headers['test1'] == 'test'
|
||||||
|
assert headers['test2'] == 'changed'
|
||||||
|
assert headers['test3'] == 'test3'
|
||||||
|
ws.close()
|
||||||
|
|
||||||
|
@pytest.mark.parametrize('client_cert', (
|
||||||
|
{'client_certificate': os.path.join(MTLS_CERT_DIR, 'clientwithkey.crt')},
|
||||||
|
{
|
||||||
|
'client_certificate': os.path.join(MTLS_CERT_DIR, 'client.crt'),
|
||||||
|
'client_certificate_key': os.path.join(MTLS_CERT_DIR, 'client.key'),
|
||||||
|
},
|
||||||
|
{
|
||||||
|
'client_certificate': os.path.join(MTLS_CERT_DIR, 'clientwithencryptedkey.crt'),
|
||||||
|
'client_certificate_password': 'foobar',
|
||||||
|
},
|
||||||
|
{
|
||||||
|
'client_certificate': os.path.join(MTLS_CERT_DIR, 'client.crt'),
|
||||||
|
'client_certificate_key': os.path.join(MTLS_CERT_DIR, 'clientencrypted.key'),
|
||||||
|
'client_certificate_password': 'foobar',
|
||||||
|
},
|
||||||
|
))
|
||||||
|
def test_mtls(self, handler, client_cert):
|
||||||
|
with handler(
|
||||||
|
# Disable client-side validation of unacceptable self-signed testcert.pem
|
||||||
|
# The test is of a check on the server side, so unaffected
|
||||||
|
verify=False,
|
||||||
|
client_cert=client_cert,
|
||||||
|
) as rh:
|
||||||
|
ws_validate_and_send(rh, Request(self.mtls_wss_base_url)).close()
|
||||||
|
|
||||||
|
def test_request_disable_proxy(self, handler):
|
||||||
|
for proxy_proto in handler._SUPPORTED_PROXY_SCHEMES or ['ws']:
|
||||||
|
# Given handler is configured with a proxy
|
||||||
|
with handler(proxies={'ws': f'{proxy_proto}://10.255.255.255'}, timeout=5) as rh:
|
||||||
|
# When a proxy is explicitly set to None for the request
|
||||||
|
ws = ws_validate_and_send(rh, Request(self.ws_base_url, proxies={'http': None}))
|
||||||
|
# Then no proxy should be used
|
||||||
|
assert ws.status == 101
|
||||||
|
ws.close()
|
||||||
|
|
||||||
|
@pytest.mark.skip_handlers_if(
|
||||||
|
lambda _, handler: Features.NO_PROXY not in handler._SUPPORTED_FEATURES, 'handler does not support NO_PROXY')
|
||||||
|
def test_noproxy(self, handler):
|
||||||
|
for proxy_proto in handler._SUPPORTED_PROXY_SCHEMES or ['ws']:
|
||||||
|
# Given the handler is configured with a proxy
|
||||||
|
with handler(proxies={'ws': f'{proxy_proto}://10.255.255.255'}, timeout=5) as rh:
|
||||||
|
for no_proxy in (f'127.0.0.1:{self.ws_port}', '127.0.0.1', 'localhost'):
|
||||||
|
# When request no proxy includes the request url host
|
||||||
|
ws = ws_validate_and_send(rh, Request(self.ws_base_url, proxies={'no': no_proxy}))
|
||||||
|
# Then the proxy should not be used
|
||||||
|
assert ws.status == 101
|
||||||
|
ws.close()
|
||||||
|
|
||||||
|
@pytest.mark.skip_handlers_if(
|
||||||
|
lambda _, handler: Features.ALL_PROXY not in handler._SUPPORTED_FEATURES, 'handler does not support ALL_PROXY')
|
||||||
|
def test_allproxy(self, handler):
|
||||||
|
supported_proto = traverse_obj(handler._SUPPORTED_PROXY_SCHEMES, 0, default='ws')
|
||||||
|
# This is a bit of a hacky test, but it should be enough to check whether the handler is using the proxy.
|
||||||
|
# 0.1s might not be enough of a timeout if proxy is not used in all cases, but should still get failures.
|
||||||
|
with handler(proxies={'all': f'{supported_proto}://10.255.255.255'}, timeout=0.1) as rh:
|
||||||
|
with pytest.raises(TransportError):
|
||||||
|
ws_validate_and_send(rh, Request(self.ws_base_url)).close()
|
||||||
|
|
||||||
|
with handler(timeout=0.1) as rh:
|
||||||
|
with pytest.raises(TransportError):
|
||||||
|
ws_validate_and_send(
|
||||||
|
rh, Request(self.ws_base_url, proxies={'all': f'{supported_proto}://10.255.255.255'})).close()
|
||||||
|
|
||||||
|
|
||||||
|
def create_fake_ws_connection(raised):
|
||||||
|
import websockets.sync.client
|
||||||
|
|
||||||
|
class FakeWsConnection(websockets.sync.client.ClientConnection):
|
||||||
|
def __init__(self, *args, **kwargs):
|
||||||
|
class FakeResponse:
|
||||||
|
body = b''
|
||||||
|
headers = {}
|
||||||
|
status_code = 101
|
||||||
|
reason_phrase = 'test'
|
||||||
|
|
||||||
|
self.response = FakeResponse()
|
||||||
|
|
||||||
|
def send(self, *args, **kwargs):
|
||||||
|
raise raised()
|
||||||
|
|
||||||
|
def recv(self, *args, **kwargs):
|
||||||
|
raise raised()
|
||||||
|
|
||||||
|
def close(self, *args, **kwargs):
|
||||||
|
return
|
||||||
|
|
||||||
|
return FakeWsConnection()
|
||||||
|
|
||||||
|
|
||||||
|
@pytest.mark.parametrize('handler', ['Websockets'], indirect=True)
|
||||||
|
class TestWebsocketsRequestHandler:
|
||||||
|
@pytest.mark.parametrize('raised,expected', [
|
||||||
|
# https://websockets.readthedocs.io/en/stable/reference/exceptions.html
|
||||||
|
(lambda: websockets.exceptions.InvalidURI(msg='test', uri='test://'), RequestError),
|
||||||
|
# Requires a response object. Should be covered by HTTP error tests.
|
||||||
|
# (lambda: websockets.exceptions.InvalidStatus(), TransportError),
|
||||||
|
(lambda: websockets.exceptions.InvalidHandshake(), TransportError),
|
||||||
|
# These are subclasses of InvalidHandshake
|
||||||
|
(lambda: websockets.exceptions.InvalidHeader(name='test'), TransportError),
|
||||||
|
(lambda: websockets.exceptions.NegotiationError(), TransportError),
|
||||||
|
# Catch-all
|
||||||
|
(lambda: websockets.exceptions.WebSocketException(), TransportError),
|
||||||
|
(lambda: TimeoutError(), TransportError),
|
||||||
|
# These may be raised by our create_connection implementation, which should also be caught
|
||||||
|
(lambda: OSError(), TransportError),
|
||||||
|
(lambda: ssl.SSLError(), SSLError),
|
||||||
|
(lambda: ssl.SSLCertVerificationError(), CertificateVerifyError),
|
||||||
|
(lambda: socks.ProxyError(), ProxyError),
|
||||||
|
])
|
||||||
|
def test_request_error_mapping(self, handler, monkeypatch, raised, expected):
|
||||||
|
import websockets.sync.client
|
||||||
|
|
||||||
|
import yt_dlp.networking._websockets
|
||||||
|
with handler() as rh:
|
||||||
|
def fake_connect(*args, **kwargs):
|
||||||
|
raise raised()
|
||||||
|
monkeypatch.setattr(yt_dlp.networking._websockets, 'create_connection', lambda *args, **kwargs: None)
|
||||||
|
monkeypatch.setattr(websockets.sync.client, 'connect', fake_connect)
|
||||||
|
with pytest.raises(expected) as exc_info:
|
||||||
|
rh.send(Request('ws://fake-url'))
|
||||||
|
assert exc_info.type is expected
|
||||||
|
|
||||||
|
@pytest.mark.parametrize('raised,expected,match', [
    # https://websockets.readthedocs.io/en/stable/reference/sync/client.html#websockets.sync.client.ClientConnection.send
    (lambda: websockets.exceptions.ConnectionClosed(None, None), TransportError, None),
    (lambda: RuntimeError(), TransportError, None),
    (lambda: TimeoutError(), TransportError, None),
    (lambda: TypeError(), RequestError, None),
    (lambda: socks.ProxyError(), ProxyError, None),
    # Catch-all
    (lambda: websockets.exceptions.WebSocketException(), TransportError, None),
])
def test_ws_send_error_mapping(self, handler, monkeypatch, raised, expected, match):
    """Check that errors raised by the underlying websocket send() are mapped to yt-dlp errors."""
    from yt_dlp.networking._websockets import WebsocketsResponseAdapter

    fake_connection = create_fake_ws_connection(raised)
    adapter = WebsocketsResponseAdapter(fake_connection, url='ws://fake-url')

    with pytest.raises(expected, match=match) as excinfo:
        adapter.send('test')

    # Require the exact mapped type, not merely a subclass match.
    assert excinfo.type is expected
|
||||||
|
|
||||||
|
@pytest.mark.parametrize('raised,expected,match', [
    # https://websockets.readthedocs.io/en/stable/reference/sync/client.html#websockets.sync.client.ClientConnection.recv
    (lambda: websockets.exceptions.ConnectionClosed(None, None), TransportError, None),
    (lambda: RuntimeError(), TransportError, None),
    (lambda: TimeoutError(), TransportError, None),
    (lambda: socks.ProxyError(), ProxyError, None),
    # Catch-all
    (lambda: websockets.exceptions.WebSocketException(), TransportError, None),
])
def test_ws_recv_error_mapping(self, handler, monkeypatch, raised, expected, match):
    """Check that errors raised by the underlying websocket recv() are mapped to yt-dlp errors."""
    from yt_dlp.networking._websockets import WebsocketsResponseAdapter

    fake_connection = create_fake_ws_connection(raised)
    adapter = WebsocketsResponseAdapter(fake_connection, url='ws://fake-url')

    with pytest.raises(expected, match=match) as excinfo:
        adapter.recv()

    # Require the exact mapped type, not merely a subclass match.
    assert excinfo.type is expected
|
@ -1,34 +0,0 @@
|
|||||||
{
    "latest": "2013.01.06",
    "signature": "72158cdba391628569ffdbea259afbcf279bbe3d8aeb7492690735dc1cfa6afa754f55c61196f3871d429599ab22f2667f1fec98865527b32632e7f4b3675a7ef0f0fbe084d359256ae4bba68f0d33854e531a70754712f244be71d4b92e664302aa99653ee4df19800d955b6c4149cd2b3f24288d6e4b40b16126e01f4c8ce6",
    "versions": {
        "2013.01.02": {
            "bin": [
                "http://youtube-dl.org/downloads/2013.01.02/youtube-dl",
                "f5b502f8aaa77675c4884938b1e4871ebca2611813a0c0e74f60c0fbd6dcca6b"
            ],
            "exe": [
                "http://youtube-dl.org/downloads/2013.01.02/youtube-dl.exe",
                "75fa89d2ce297d102ff27675aa9d92545bbc91013f52ec52868c069f4f9f0422"
            ],
            "tar": [
                "http://youtube-dl.org/downloads/2013.01.02/youtube-dl-2013.01.02.tar.gz",
                "6a66d022ac8e1c13da284036288a133ec8dba003b7bd3a5179d0c0daca8c8196"
            ]
        },
        "2013.01.06": {
            "bin": [
                "http://youtube-dl.org/downloads/2013.01.06/youtube-dl",
                "64b6ed8865735c6302e836d4d832577321b4519aa02640dc508580c1ee824049"
            ],
            "exe": [
                "http://youtube-dl.org/downloads/2013.01.06/youtube-dl.exe",
                "58609baf91e4389d36e3ba586e21dab882daaaee537e4448b1265392ae86ff84"
            ],
            "tar": [
                "http://youtube-dl.org/downloads/2013.01.06/youtube-dl-2013.01.06.tar.gz",
                "fe77ab20a95d980ed17a659aa67e371fdd4d656d19c4c7950e7b720b0c2f1a86"
            ]
        }
    }
}
|
|
@ -1 +1 @@
|
|||||||
@py -bb -Werror -Xdev "%~dp0yt_dlp\__main__.py" %*
|
@py -Werror -Xdev "%~dp0yt_dlp\__main__.py" %*
|
||||||
|
Some files were not shown because too many files have changed in this diff Show More
Loading…
Reference in New Issue