Compare commits
554 Commits
2021.02.22
...
master
Author | SHA1 | Date |
---|---|---|
dirkf | c5098961b0 | 2 months ago |
dirkf | dbc08fba83 | 2 months ago |
Aiur Adept | 71223bff39 | 2 months ago |
dirkf | e1b3fa242c | 2 months ago |
dirkf | 451046d62a | 2 months ago |
dirkf | 16f5bbc464 | 3 months ago |
dirkf | d35ce6ce95 | 3 months ago |
dirkf | 76ac69917e | 3 months ago |
dirkf | 756f6b45c7 | 3 months ago |
bashonly | 43a74c5fa5 | 3 months ago |
dirkf | a452f9437c | 3 months ago |
unkernet | 36801c62df | 3 months ago |
Sergey Musatov | f4b47754d9 | 3 months ago |
dirkf | 37cea84f77 | 3 months ago |
dirkf | 4652109643 | 3 months ago |
dirkf | 3c466186a8 | 3 months ago |
dirkf | 4d05f84325 | 3 months ago |
dirkf | e0094e63c3 | 3 months ago |
dirkf | fd8242e3ef | 3 months ago |
dirkf | ad01fa6cca | 3 months ago |
dirkf | 2eac0fa379 | 3 months ago |
Paper | 0153b387e5 | 4 months ago |
dirkf | a48fe7491d | 4 months ago |
dirkf | e20ca543f0 | 4 months ago |
dirkf | e39466051f | 4 months ago |
dirkf | d95c0d203f | 4 months ago |
dirkf | 3bde6a5752 | 4 months ago |
dirkf | 50f6c5668a | 4 months ago |
dirkf | b4ff08bd2d | 4 months ago |
kmnx | 88bd8b9f87 | 4 months ago |
dirkf | 21924742f7 | 4 months ago |
dirkf | 768ccccd9b | 4 months ago |
dirkf | eee9a247eb | 4 months ago |
dirkf | 34484e49f5 | 4 months ago |
dirkf | 06da64ee51 | 4 months ago |
dirkf | a08f2b7e45 | 5 months ago |
dirkf | 668332b973 | 5 months ago |
dirkf | 0b2ce3685e | 5 months ago |
dirkf | c2766cb80e | 5 months ago |
dirkf | eb38665438 | 5 months ago |
dirkf | e0727e4ab6 | 6 months ago |
Ori Avtalion | 4ea59c6107 | 6 months ago |
dirkf | 21792b88b7 | 6 months ago |
dirkf | d8f134a664 | 6 months ago |
dirkf | 31a15a7c8d | 6 months ago |
dirkf | 19dc10b986 | 6 months ago |
dirkf | 182f63e82a | 6 months ago |
gy-chen | 71211e7db7 | 6 months ago |
Zizheng Guo | a96a45b2cd | 7 months ago |
hatsomatt | 820fae3b3a | 7 months ago |
dirkf | aef24d97e9 | 7 months ago |
dirkf | f7b30e3f73 | 7 months ago |
dirkf | f66372403f | 7 months ago |
dirkf | 7216fa2ac4 | 7 months ago |
dirkf | acc383b9e3 | 7 months ago |
Hubert Hirtz | f0812d7848 | 7 months ago |
Aaron Tan | 40bd5c1815 | 7 months ago |
dirkf | 70f230f9cf | 7 months ago |
dirkf | 48ddab1f3a | 7 months ago |
dirkf | 7687389f08 | 7 months ago |
dirkf | 4416f82c80 | 8 months ago |
dirkf | bdda6b81df | 8 months ago |
dirkf | 1fd8f802b8 | 8 months ago |
dirkf | 4eaeb9b2c6 | 8 months ago |
dirkf | bec9180e89 | 8 months ago |
dirkf | c58b655a9e | 8 months ago |
dirkf | dc512e3a8a | 8 months ago |
dirkf | f8b0135850 | 8 months ago |
dirkf | 640d39f03a | 8 months ago |
dirkf | 6651871416 | 8 months ago |
mk-pmb | be008e657d | 10 months ago |
Robotix | b1bbc1e502 | 10 months ago |
dirkf | 55a442adae | 10 months ago |
mimvahedi | c62936a5f2 | 10 months ago |
dirkf | 427472351c | 10 months ago |
dirkf | c6538ed323 | 10 months ago |
dirkf | 8d227cb97b | 10 months ago |
dirkf | 4e115e18cb | 10 months ago |
ReenigneArcher | b7fca0fab3 | 10 months ago |
dirkf | 00ef748cc0 | 1 year ago |
dirkf | 66ab0814c4 | 1 year ago |
dirkf | bbd3e7e999 | 1 year ago |
dirkf | 21caaf2380 | 1 year ago |
dirkf | 31f50c8194 | 1 year ago |
dirkf | 7d58f0769a | 1 year ago |
dirkf | 86e3cf5e58 | 1 year ago |
dirkf | 2efc8de4d2 | 1 year ago |
dirkf | e4178b5af3 | 1 year ago |
dirkf | 2d2a4bc832 | 1 year ago |
dirkf | 7d965e6b65 | 1 year ago |
dirkf | abef53466d | 1 year ago |
dirkf | e7926ae9f4 | 1 year ago |
dirkf | 87e578c9b8 | 1 year ago |
dirkf | 0861812d72 | 1 year ago |
dirkf | b870181229 | 1 year ago |
dirkf | a25e9f3c84 | 1 year ago |
dirkf | aac33155e4 | 1 year ago |
dirkf | 2b7dd3b2a2 | 1 year ago |
dirkf | 44faa71b19 | 1 year ago |
dirkf | 7bce2ad441 | 1 year ago |
dirkf | ca71e56c48 | 1 year ago |
dirkf | 2a4e9faa77 | 1 year ago |
dirkf | 74eef6bb5e | 1 year ago |
dirkf | 1fa8b86f0b | 1 year ago |
dirkf | b2ba24bb02 | 1 year ago |
dirkf | a190b55964 | 1 year ago |
dirkf | b2741f2654 | 1 year ago |
dirkf | 8465222041 | 1 year ago |
dirkf | 4339910df3 | 1 year ago |
dirkf | eaaf4c6736 | 1 year ago |
dirkf | 4566e6e53e | 1 year ago |
dirkf | 1e8ccdd2eb | 1 year ago |
dirkf | cb9366eda5 | 1 year ago |
dirkf | d9d07a9581 | 1 year ago |
dirkf | 825a40744b | 1 year ago |
dirkf | 47214e46d8 | 1 year ago |
dirkf | 1d8d5a93f7 | 1 year ago |
dirkf | 1634b1d61e | 1 year ago |
bashonly | 21438a4194 | 1 year ago |
Simon Sawicki | 8334ec961b | 1 year ago |
bashonly | 3801d36416 | 1 year ago |
dirkf | b383be9887 | 1 year ago |
dirkf | 46fde7caee | 1 year ago |
dirkf | 648dc5304c | 1 year ago |
dirkf | 1720c04dc5 | 1 year ago |
dirkf | d5ef405c5d | 1 year ago |
dirkf | f47fdb9564 | 1 year ago |
dirkf | b6dff4073d | 1 year ago |
dirkf | f24bc9272e | 1 year ago |
dirkf | b08a580906 | 1 year ago |
dirkf | 2500300c2a | 1 year ago |
dirkf | 58fc5bde47 | 1 year ago |
dirkf | fa7f0effbe | 1 year ago |
dirkf | ebdc82c586 | 1 year ago |
pukkandan | 9112e668a5 | 1 year ago |
dirkf | 07af47960f | 1 year ago |
dirkf | ae8ba2c319 | 1 year ago |
dirkf | d6433cbb2c | 1 year ago |
dirkf | ff75c300f5 | 1 year ago |
dirkf | a2534f7b88 | 1 year ago |
dirkf | b8a86dcf1a | 1 year ago |
dirkf | 2389c7cbd3 | 1 year ago |
dirkf | ee731f3d00 | 1 year ago |
dirkf | 1f7c6f8b2b | 1 year ago |
dirkf | d89c2137ba | 1 year ago |
dirkf | d1c6c5c4d6 | 1 year ago |
dirkf | 6ed3433828 | 1 year ago |
dirkf | a85a875fef | 1 year ago |
dirkf | 11cc3f3ad0 | 1 year ago |
dirkf | 64d6dd64c8 | 1 year ago |
dirkf | 211cbfd5d4 | 1 year ago |
dirkf | 26035bde46 | 1 year ago |
dirkf | 2da3fa04a6 | 1 year ago |
Gabriel Nagy | 735e87adfc | 1 year ago |
dirkf | fe7e13066c | 1 year ago |
dirkf | 213d1d91bf | 2 years ago |
dirkf | f8253a5289 | 2 years ago |
dirkf | d6ae3b77cd | 2 years ago |
dirkf | 9f4d83ff42 | 2 years ago |
dirkf | 25124bd640 | 2 years ago |
dirkf | 78da22489b | 2 years ago |
dirkf | 557dbac173 | 2 years ago |
dirkf | cdf40b6aa6 | 2 years ago |
pukkandan | 3f6d2bd76f | 2 years ago |
pukkandan | 88f28f620b | 2 years ago |
dirkf | f35b757c82 | 2 years ago |
dirkf | 45495228b7 | 2 years ago |
dirkf | 6fece0a96b | 2 years ago |
dirkf | 70ff013910 | 2 years ago |
dirkf | e8de54bce5 | 2 years ago |
dirkf | baa6c5e95c | 2 years ago |
dirkf | 5c985d4f81 | 2 years ago |
dirkf | 8c86fd33dc | 2 years ago |
Sophira | 27d41d7365 | 2 years ago |
dirkf | 0402710227 | 2 years ago |
pukkandan | 3e92c60fcd | 2 years ago |
pukkandan | 3da17834a4 | 2 years ago |
dirkf | f7ce98a21e | 2 years ago |
dirkf | e67e52a8f8 | 2 years ago |
pukkandan | 1d3751c3fe | 2 years ago |
df | 6067451e43 | 2 years ago |
dirkf | 57802e632f | 2 years ago |
dirkf | 2dd6c6edd8 | 2 years ago |
dirkf | dd9aa74bee | 2 years ago |
dirkf | 42b098dd79 | 2 years ago |
fonkap | 6f8c2635a5 | 2 years ago |
fonkap | de48105dd8 | 2 years ago |
fonkap | 822f19f05d | 2 years ago |
teddy171 | 33db85c571 | 2 years ago |
Valentin Metz | f33923cba7 | 2 years ago |
dirkf | e8198c517b | 2 years ago |
dirkf | bafb6dec72 | 2 years ago |
dirkf | 4e04f10499 | 2 years ago |
dirkf | 90c9f789d9 | 2 years ago |
dirkf | 249f2b6316 | 2 years ago |
dirkf | d6b14ba316 | 2 years ago |
dirkf | 30e986b834 | 2 years ago |
dirkf | 58988c1421 | 2 years ago |
dirkf | e19ec52322 | 2 years ago |
dirkf | f2f90887ca | 2 years ago |
dirkf | cd987e6fca | 2 years ago |
dirkf | d947ffe8e3 | 2 years ago |
dirkf | 384f632e8a | 2 years ago |
dirkf | 9d17948b5a | 2 years ago |
afterdelight | f316f5d4e3 | 2 years ago |
dirkf | bc6f94e459 | 2 years ago |
Epsilonator | be3392a0d4 | 2 years ago |
zhangeric-15 | 6d829d8119 | 2 years ago |
Ruowang Sun | 98b0cf1cd0 | 2 years ago |
Leon Etienne | e9611a2a36 | 2 years ago |
JChris246 | 807e593a32 | 2 years ago |
Rodrigo Dias | 297fbff23b | 2 years ago |
Brian Marks | 37cbdfa0e7 | 2 years ago |
dirkf | 295736c9cb | 2 years ago |
pukkandan | 14ef89a8da | 2 years ago |
dirkf | 195f22f679 | 2 years ago |
dirkf | fc2beab0e7 | 2 years ago |
FraFraFra-LongD | 1a4fbe8462 | 2 years ago |
dirkf | c2f9be3e63 | 2 years ago |
dirkf | 604762a9f8 | 2 years ago |
Moises Lima | 47e70fff8b | 2 years ago |
dirkf | de39d1281c | 2 years ago |
Andrei Lebedev | 27ed77aabb | 2 years ago |
dirkf | c4b19a8816 | 2 years ago |
dirkf | 087ddc2371 | 2 years ago |
dirkf | 65ccb0dd4e | 2 years ago |
dirkf | a874871801 | 2 years ago |
dirkf | b7c25959f0 | 2 years ago |
dirkf | f102e3dc4e | 2 years ago |
dirkf | a19855f0f5 | 2 years ago |
Xie Yanbo | ce5d36486e | 2 years ago |
Xie Yanbo | d25cf62086 | 2 years ago |
dirkf | 502cefa41f | 2 years ago |
dirkf | 0faa45d6c0 | 2 years ago |
ache | 447edc48e6 | 2 years ago |
dirkf | ee8560d01e | 2 years ago |
dirkf | 7135277fec | 2 years ago |
dirkf | 7bbd5b13d4 | 2 years ago |
Xie Yanbo | c91cbf6072 | 2 years ago |
dirkf | 11b284c81f | 2 years ago |
dirkf | c94a459a24 | 2 years ago |
dirkf | 6e2626f092 | 2 years ago |
dirkf | c282e5f8d7 | 2 years ago |
dirkf | 2ced5a7912 | 2 years ago |
Xiyue | 82e4eca711 | 2 years ago |
dirkf | 1b1442887e | 2 years ago |
dirkf | 22127b271c | 2 years ago |
coletdjnz | d35557a75d | 2 years ago |
dirkf | 9493ffdb8b | 2 years ago |
pukkandan | 7009bb9f31 | 2 years ago |
dirkf | 218c423bc0 | 2 years ago |
dirkf | 55c823634d | 2 years ago |
dirkf | 4050e10a4c | 2 years ago |
dirkf | ed5c44e7b7 | 2 years ago |
dirkf | 0f6422590e | 2 years ago |
dirkf | 4c6fba3765 | 2 years ago |
dirkf | d619dd712f | 2 years ago |
dirkf | 573b13410e | 2 years ago |
dirkf | 66e58dccc2 | 2 years ago |
dirkf | 556862bc91 | 2 years ago |
gudata | a8d5316aaf | 2 years ago |
dirkf | fd3f3bebd0 | 2 years ago |
dirkf | 46b8ae2f52 | 2 years ago |
dirkf | 538ec65ba7 | 2 years ago |
dirkf | b0a60ce203 | 2 years ago |
dirkf | e52e8b8111 | 2 years ago |
dirkf | d231b56717 | 2 years ago |
dirkf | e6a836d54c | 2 years ago |
dirkf | deee741fb1 | 2 years ago |
Wes | adb5294177 | 2 years ago |
Kyraminol Endyeran | 5f5c127ece | 2 years ago |
dirkf | 090acd58c1 | 2 years ago |
dirkf | a03b9775d5 | 2 years ago |
dirkf | 8a158a936c | 2 years ago |
dirkf | 11665dd236 | 2 years ago |
dirkf | cc179df346 | 2 years ago |
pukkandan | 0700fde640 | 2 years ago |
dirkf | 811c480f7b | 2 years ago |
dirkf | 3aa94d7945 | 2 years ago |
dirkf | ef044be34b | 2 years ago |
dirkf | 530f4582d0 | 2 years ago |
pukkandan | 1baa0f5f66 | 2 years ago |
LewdyCoder | 9aa8e5340f | 2 years ago |
dirkf | 04fd3289d3 | 2 years ago |
dirkf | 52c3751df7 | 2 years ago |
dirkf | 187a48aee2 | 2 years ago |
Jacob Chapman | be35e5343a | 2 years ago |
dirkf | c3deca86ae | 2 years ago |
dirkf | c7965b9fc2 | 2 years ago |
dirkf | e988fa4523 | 2 years ago |
dirkf | e27d8d819f | 2 years ago |
Árni Dagur | ebc627847c | 2 years ago |
dirkf | a0068bd6be | 2 years ago |
dirkf | b764dbe773 | 3 years ago |
nixxo | 871645a4a4 | 3 years ago |
nixxo | 1f50a07771 | 3 years ago |
nixxo | 9e5ca66f16 | 3 years ago |
lihan7 | 17d295a1ec | 3 years ago |
dirkf | 49c5293014 | 3 years ago |
df | 6508688e88 | 3 years ago |
dirkf | 4194d253c0 | 3 years ago |
dirkf | f8e543c906 | 3 years ago |
dirkf | c4d1738316 | 3 years ago |
dirkf | 1f13ccfd7f | 3 years ago |
marieell | 923292ba64 | 3 years ago |
Lesmiscore (Naoya Ozaki) | 782bfd26db | 3 years ago |
Vladimir Stavrinov | 3472227074 | 3 years ago |
Petr Vaněk | bf23bc0489 | 3 years ago |
Petr Vaněk | 85bf26c1d0 | 3 years ago |
Petr Vaněk | d8adca1b66 | 3 years ago |
Petr Vaněk | d02064218b | 3 years ago |
Petr Vaněk | b1297308fb | 3 years ago |
Petr Vaněk | 8088ce036a | 3 years ago |
Petr Vaněk | 29f7bfc4d7 | 3 years ago |
dirkf | 74f8cc48af | 3 years ago |
kikuyan | 8ff961d10f | 3 years ago |
dirkf | 266b6ef185 | 3 years ago |
dirkf | 825d3426c5 | 3 years ago |
dirkf | 47b0c8697a | 3 years ago |
Seonghyeon Cho | 734dfbb4e3 | 3 years ago |
df | ddc080a562 | 3 years ago |
Abdullah Ibn Fulan | 16a3fe2ba6 | 3 years ago |
Abdullah Ibn Fulan | c820a284a2 | 3 years ago |
dirkf | 58babe9af7 | 3 years ago |
df | 6d4932f023 | 3 years ago |
dirkf | 92d73ef393 | 3 years ago |
dirkf | 91278f4b6b | 3 years ago |
dirkf | 73e1ab6125 | 3 years ago |
dirkf | 584715a803 | 3 years ago |
dirkf | e00b0eab1e | 3 years ago |
dirkf | 005339d637 | 3 years ago |
Chris Rose | 23ad6402a6 | 3 years ago |
dirkf | 9642344965 | 3 years ago |
dirkf | 568c7005d5 | 3 years ago |
dirkf | 5cb4833f40 | 3 years ago |
dirkf | 5197336de6 | 3 years ago |
dirkf | 01824d275b | 3 years ago |
dirkf | 39a98b09a2 | 3 years ago |
dirkf | f0a05a55c2 | 3 years ago |
dirkf | 4186e81777 | 3 years ago |
dirkf | b494824286 | 3 years ago |
dirkf | 8248133e5e | 3 years ago |
dirkf | 27dbf6f0ab | 3 years ago |
dirkf | 61d791726f | 3 years ago |
pukkandan | 0c0876f790 | 3 years ago |
dirkf | 7a497f1405 | 3 years ago |
dirkf | 5add3f4373 | 3 years ago |
pukkandan | 78ce962f4f | 3 years ago |
dirkf | 41f0043983 | 3 years ago |
dirkf | 34c06b16f5 | 3 years ago |
dirkf | 1e677567cd | 3 years ago |
df | af9e72507e | 3 years ago |
dirkf | 6ca7b77696 | 3 years ago |
dirkf | 9d142109f4 | 3 years ago |
dirkf | 1ca673bd98 | 3 years ago |
df | e1eae16b56 | 3 years ago |
df | 96f87aaa3b | 3 years ago |
df | 5f5de51a49 | 3 years ago |
df | 39ca35e765 | 3 years ago |
df | d76d59d99d | 3 years ago |
df | 2c2c2bd348 | 3 years ago |
df | 46e0a729b2 | 3 years ago |
df | 57044eaceb | 3 years ago |
pukkandan | a3373da70c | 3 years ago |
pukkandan | 2c4cb134a9 | 3 years ago |
pukkandan | bfe72723d8 | 3 years ago |
pukkandan | ed99d68bdd | 3 years ago |
Sergey M․ | 5014bd67c2 | 3 years ago |
Sergey M․ | e418823350 | 3 years ago |
lanegramling | b5242da7d2 | 3 years ago |
bopol | a803582717 | 3 years ago |
Remita Amine | 7fb9564420 | 3 years ago |
Aleri Kaisattera | 379f52a495 | 3 years ago |
Sergey M․ | cb668eb973 | 3 years ago |
Sergey M․ | 751c9ae39a | 3 years ago |
Sergey M․ | da32828208 | 3 years ago |
Sergey M․ | 2ccee8db74 | 3 years ago |
Sergey M․ | 47f2f2fbe9 | 3 years ago |
Sergey M․ | 03ab02730f | 3 years ago |
Tianyi Shi | 4c77a2e538 | 3 years ago |
bopol | 4131703001 | 3 years ago |
Logan B | cc21aebe90 | 3 years ago |
Sergey M․ | 57b9a4b4c6 | 3 years ago |
kikuyan | 3a7ef27cf3 | 3 years ago |
kikuyan | a7f61feab2 | 3 years ago |
kikuyan | 8fe5d54eb7 | 3 years ago |
kikuyan | d156bc8d59 | 3 years ago |
Sergey M | c2350cac24 | 3 years ago |
Sergey M․ | b224cf39d5 | 3 years ago |
Sergey M․ | 5f85eb820c | 3 years ago |
Sergey M․ | bb7ac1ed66 | 3 years ago |
Sergey M․ | fdf91c52a8 | 3 years ago |
Sergey M․ | 943070af4a | 3 years ago |
Remita Amine | 82f3993ba3 | 3 years ago |
Sergey M․ | d495292852 | 3 years ago |
Sergey M․ | 2ee6c7f110 | 3 years ago |
Sergey M․ | 6511b8e8d7 | 3 years ago |
Sergey M․ | f3cd1d9cec | 3 years ago |
phlip | e13a01061d | 3 years ago |
Sergey M․ | 24297a42ef | 3 years ago |
Remita Amine | 1980ff4550 | 3 years ago |
Remita Amine | dfbbe2902f | 3 years ago |
Remita Amine | e1a9d0ef78 | 3 years ago |
Sergey M․ | f47627a1c9 | 3 years ago |
Sergey M․ | efeb9e0fbf | 3 years ago |
Sergey M․ | e90a890f01 | 3 years ago |
Sergey M․ | 199c645bee | 3 years ago |
Sergey M․ | 503a3744ad | 3 years ago |
kr4ssi | ef03721f47 | 3 years ago |
Sergey M․ | 1e8aaa1d15 | 3 years ago |
Sergey M․ | 6423d7054e | 3 years ago |
Sergey M․ | eb5080286a | 3 years ago |
Sergey M․ | 286e01ce30 | 3 years ago |
Sergey M․ | 8536dcafd8 | 3 years ago |
Sergey M․ | 552b139911 | 3 years ago |
Lukas Anzinger | 2202cef0e4 | 3 years ago |
Sergey M․ | a726009987 | 3 years ago |
catboy | 03afef7538 | 3 years ago |
Jacob Chapman | b797c1cc75 | 3 years ago |
Sergey M․ | 04be55307a | 3 years ago |
Sergey M․ | 504e4d804d | 3 years ago |
Sergey M․ | 1786cd3fe4 | 3 years ago |
Ben Rog-Wilhelm | b8645c1f58 | 3 years ago |
Ben Rog-Wilhelm | fe05191b8c | 3 years ago |
Sergey M․ | 0204838163 | 3 years ago |
Sergey M․ | a0df8a0617 | 3 years ago |
Sergey M․ | d1b9a5e2ef | 3 years ago |
Sergey M․ | ff04d43c46 | 3 years ago |
Sergey M․ | d2f72c40db | 3 years ago |
Sergey M․ | e33dfb445c | 3 years ago |
Sergey M․ | 94520568b3 | 3 years ago |
Sergey M․ | 273964d190 | 3 years ago |
Sergey M․ | 346dd3b5e8 | 3 years ago |
schnusch | f5c2c06231 | 3 years ago |
Sergey M․ | 57eaaff5cf | 3 years ago |
Sergey M․ | 999329cf6b | 3 years ago |
catboy | c6ab792990 | 3 years ago |
The Hatsune Daishi | 0db79d8181 | 3 years ago |
Sergey M․ | 7e8b3f9439 | 3 years ago |
Sergey M․ | ac19c3ac80 | 3 years ago |
Sergey M․ | c4a451bcdd | 3 years ago |
Sergey M․ | 5ad69d3d0e | 3 years ago |
Sergey M․ | 32290307a4 | 3 years ago |
Sergey M․ | dab83a2597 | 3 years ago |
dirkf | 41920fc80e | 3 years ago |
Sergey M․ | 9f6c03a006 | 4 years ago |
Sergey M․ | 596b26606c | 4 years ago |
Sergey M․ | f20b505b46 | 4 years ago |
Sergey M․ | cfee2dfe83 | 4 years ago |
Sergey M․ | 30a3a4c70f | 4 years ago |
Sergey M․ | a00a7e0cad | 4 years ago |
Sergey M․ | 54558e0baa | 4 years ago |
Sergey M․ | 7c52395479 | 4 years ago |
zraktvor | ea87ed8394 | 4 years ago |
Cássio Ávila | d01e261a15 | 4 years ago |
quyleanh | 79e4ccfc4b | 4 years ago |
Sergey M․ | 06159135ef | 4 years ago |
Aaron Lipinski | 4fb25ff5a3 | 4 years ago |
Sergey M․ | 1b0a13f33c | 4 years ago |
Remita Amine | 27e5a4464d | 4 years ago |
Sergey M․ | 545d6cb9d0 | 4 years ago |
Remita Amine | 006eea564d | 4 years ago |
Remita Amine | 281b8e3443 | 4 years ago |
Remita Amine | c0c5134c57 | 4 years ago |
Sergey M․ | 72a2c0a9ed | 4 years ago |
Sergey M․ | 445db582a2 | 4 years ago |
Sergey M․ | 6b116f0c03 | 4 years ago |
Sergey M․ | 70d0d4f9be | 4 years ago |
Sergey M․ | 6b315d96bc | 4 years ago |
guredora | 25b1287323 | 4 years ago |
Remita Amine | 760c911299 | 4 years ago |
Remita Amine | 162bf9e10a | 4 years ago |
Remita Amine | 6beb1ac65b | 4 years ago |
Remita Amine | 3ae9c0f410 | 4 years ago |
Remita Amine | e165f5641f | 4 years ago |
RomanEmelyanov | aee6feb02a | 4 years ago |
Remita Amine | 654b4f4ff2 | 4 years ago |
Remita Amine | 1df2596f81 | 4 years ago |
Remita Amine | 04d4a3b136 | 4 years ago |
Allan Daemon | 392c467f95 | 4 years ago |
Vid | c5aa8f36bf | 4 years ago |
Remita Amine | 3748863070 | 4 years ago |
Sergey M․ | ca304beb15 | 4 years ago |
Sergey M․ | e789bb1aa4 | 4 years ago |
Sergey M․ | 14f29f087e | 4 years ago |
Remita Amine | b97fb2edac | 4 years ago |
Remita Amine | 28bab774a0 | 4 years ago |
Sergey M․ | 8f493de9fb | 4 years ago |
Sergey M․ | 207bc35d34 | 4 years ago |
Remita Amine | 955894e72f | 4 years ago |
Sergey M․ | 287e50b56b | 4 years ago |
Chris Hranj | da762c4e32 | 4 years ago |
Remita Amine | 87a8bde777 | 4 years ago |
Remita Amine | 49fc0a567f | 4 years ago |
Remita Amine | cc777dcaa0 | 4 years ago |
Remita Amine | c785911870 | 4 years ago |
Remita Amine | 605e7b5e47 | 4 years ago |
Remita Amine | 8562218350 | 4 years ago |
Sergey M․ | 76da1c954a | 4 years ago |
Sergey M․ | c2fbfb49da | 4 years ago |
Roman Sebastian Karwacik | d1069d33b4 | 4 years ago |
The Hatsune Daishi | eafcadea26 | 4 years ago |
Remita Amine | a40002444e | 4 years ago |
Sergey M․ | 5208ae92fc | 4 years ago |
Remita Amine | 8117d613ac | 4 years ago |
Martin Ström | 00b4d72d1e | 4 years ago |
Remita Amine | 21ccd0d7f4 | 4 years ago |
Sergey M․ | 7e79ba7dd6 | 4 years ago |
Remita Amine | fa6bf0a711 | 4 years ago |
Remita Amine | f912d6c8cf | 4 years ago |
Sergey M․ | 357bfe251d | 4 years ago |
Remita Amine | 3be098010f | 4 years ago |
Remita Amine | 9955bb4a27 | 4 years ago |
Sergey M․ | ebfd66c4b1 | 4 years ago |
Sergey M․ | b509d24b2f | 4 years ago |
Sergey M․ | 1860d0f41c | 4 years ago |
Remita Amine | 60845121ca | 4 years ago |
Remita Amine | 1182f9567b | 4 years ago |
Remita Amine | ef414343e5 | 4 years ago |
Remita Amine | 43d986acd8 | 4 years ago |
Remita Amine | 9c644a6419 | 4 years ago |
Remita Amine | fc2c6d5323 | 4 years ago |
Remita Amine | 64ed3af328 | 4 years ago |
Sergey M․ | bae7dbf78b | 4 years ago |
Sergey M․ | 15c24b0346 | 4 years ago |
Sergey M․ | 477bff6906 | 4 years ago |
Sergey M․ | 1a1ccd9a6e | 4 years ago |
Sergey M․ | 7dc513487f | 4 years ago |
Remita Amine | c6a14755bb | 4 years ago |
Remita Amine | 7f064d50db | 4 years ago |
Remita Amine | b8b622fbeb | 4 years ago |
Remita Amine | ec64ec9651 | 4 years ago |
Sergey M․ | f68692b004 | 4 years ago |
Sergey M․ | 8c9766f4bf | 4 years ago |
Sergey M․ | 061c030133 | 4 years ago |
Remita Amine | 8f56907afa | 4 years ago |
Remita Amine | e1adb3ed4f | 4 years ago |
dirkf | e465b25c1f | 4 years ago |
Sergey M․ | 7c06216abf | 4 years ago |
Sergey M․ | 0002888627 | 4 years ago |
Sergey M․ | 3fb14cd214 | 4 years ago |
Remita Amine | bee6182680 | 4 years ago |
Remita Amine | 38fe5e239a | 4 years ago |
Remita Amine | 678d46f6bb | 4 years ago |
Alexander Seiler | 3c58f9e0b9 | 4 years ago |
Remita Amine | ef28e33249 | 4 years ago |
nixxo | 9662e4964b | 4 years ago |
Remita Amine | 44603290e5 | 4 years ago |
Remita Amine | 1631fca1ee | 4 years ago |
Remita Amine | 295860ff00 | 4 years ago |
Sergey M․ | 8cb4b71909 | 4 years ago |
Remita Amine | d81421af4b | 4 years ago |
nixxo | 7422a2194f | 4 years ago |
Remita Amine | 2090dbdc8c | 4 years ago |
@ -0,0 +1 @@
|
|||||||
|
blank_issues_enabled: false
|
@ -1,74 +1,479 @@
|
|||||||
name: CI
|
name: CI
|
||||||
on: [push, pull_request]
|
|
||||||
|
env:
|
||||||
|
all-cpython-versions: 2.6, 2.7, 3.2, 3.3, 3.4, 3.5, 3.6, 3.7, 3.8, 3.9, 3.10, 3.11, 3.12
|
||||||
|
main-cpython-versions: 2.7, 3.2, 3.5, 3.9, 3.11
|
||||||
|
pypy-versions: pypy-2.7, pypy-3.6, pypy-3.7
|
||||||
|
cpython-versions: main
|
||||||
|
test-set: core
|
||||||
|
# Python beta version to be built using pyenv before setup-python support
|
||||||
|
# Must also be included in all-cpython-versions
|
||||||
|
next: 3.13
|
||||||
|
|
||||||
|
on:
|
||||||
|
push:
|
||||||
|
# push inputs aren't known to GitHub
|
||||||
|
inputs:
|
||||||
|
cpython-versions:
|
||||||
|
type: string
|
||||||
|
default: all
|
||||||
|
test-set:
|
||||||
|
type: string
|
||||||
|
default: core
|
||||||
|
pull_request:
|
||||||
|
# pull_request inputs aren't known to GitHub
|
||||||
|
inputs:
|
||||||
|
cpython-versions:
|
||||||
|
type: string
|
||||||
|
default: main
|
||||||
|
test-set:
|
||||||
|
type: string
|
||||||
|
default: both
|
||||||
|
workflow_dispatch:
|
||||||
|
inputs:
|
||||||
|
cpython-versions:
|
||||||
|
type: choice
|
||||||
|
description: CPython versions (main = 2.7, 3.2, 3.5, 3.9, 3.11)
|
||||||
|
options:
|
||||||
|
- all
|
||||||
|
- main
|
||||||
|
required: true
|
||||||
|
default: main
|
||||||
|
test-set:
|
||||||
|
type: choice
|
||||||
|
description: core, download
|
||||||
|
options:
|
||||||
|
- both
|
||||||
|
- core
|
||||||
|
- download
|
||||||
|
required: true
|
||||||
|
default: both
|
||||||
|
|
||||||
|
permissions:
|
||||||
|
contents: read
|
||||||
|
|
||||||
jobs:
|
jobs:
|
||||||
|
select:
|
||||||
|
name: Select tests from inputs
|
||||||
|
runs-on: ubuntu-latest
|
||||||
|
outputs:
|
||||||
|
cpython-versions: ${{ steps.run.outputs.cpython-versions }}
|
||||||
|
test-set: ${{ steps.run.outputs.test-set }}
|
||||||
|
own-pip-versions: ${{ steps.run.outputs.own-pip-versions }}
|
||||||
|
steps:
|
||||||
|
# push and pull_request inputs aren't known to GitHub (pt3)
|
||||||
|
- name: Set push defaults
|
||||||
|
if: ${{ github.event_name == 'push' }}
|
||||||
|
env:
|
||||||
|
cpython-versions: all
|
||||||
|
test-set: core
|
||||||
|
run: |
|
||||||
|
echo "cpython-versions=${{env.cpython-versions}}" >> "$GITHUB_ENV"
|
||||||
|
echo "test_set=${{env.test_set}}" >> "$GITHUB_ENV"
|
||||||
|
- name: Get pull_request inputs
|
||||||
|
if: ${{ github.event_name == 'pull_request' }}
|
||||||
|
env:
|
||||||
|
cpython-versions: main
|
||||||
|
test-set: both
|
||||||
|
run: |
|
||||||
|
echo "cpython-versions=${{env.cpython-versions}}" >> "$GITHUB_ENV"
|
||||||
|
echo "test_set=${{env.test_set}}" >> "$GITHUB_ENV"
|
||||||
|
- name: Make version array
|
||||||
|
id: run
|
||||||
|
run: |
|
||||||
|
# Make a JSON Array from comma/space-separated string (no extra escaping)
|
||||||
|
json_list() { \
|
||||||
|
ret=""; IFS="${IFS},"; set -- $*; \
|
||||||
|
for a in "$@"; do \
|
||||||
|
ret=$(printf '%s"%s"' "${ret}${ret:+, }" "$a"); \
|
||||||
|
done; \
|
||||||
|
printf '[%s]' "$ret"; }
|
||||||
|
tests="${{ inputs.test-set || env.test-set }}"
|
||||||
|
[ $tests = both ] && tests="core download"
|
||||||
|
printf 'test-set=%s\n' "$(json_list $tests)" >> "$GITHUB_OUTPUT"
|
||||||
|
versions="${{ inputs.cpython-versions || env.cpython-versions }}"
|
||||||
|
if [ "$versions" = all ]; then \
|
||||||
|
versions="${{ env.all-cpython-versions }}"; else \
|
||||||
|
versions="${{ env.main-cpython-versions }}"; \
|
||||||
|
fi
|
||||||
|
printf 'cpython-versions=%s\n' \
|
||||||
|
"$(json_list ${versions}${versions:+, }${{ env.pypy-versions }})" >> "$GITHUB_OUTPUT"
|
||||||
|
# versions with a special get-pip.py in a per-version subdirectory
|
||||||
|
printf 'own-pip-versions=%s\n' \
|
||||||
|
"$(json_list 2.6, 2.7, 3.2, 3.3, 3.4, 3.5, 3.6)" >> "$GITHUB_OUTPUT"
|
||||||
|
|
||||||
tests:
|
tests:
|
||||||
name: Tests
|
name: Run tests
|
||||||
|
needs: select
|
||||||
|
permissions:
|
||||||
|
contents: read
|
||||||
|
packages: write
|
||||||
runs-on: ${{ matrix.os }}
|
runs-on: ${{ matrix.os }}
|
||||||
|
env:
|
||||||
|
PIP: python -m pip
|
||||||
|
PIP_DISABLE_PIP_VERSION_CHECK: true
|
||||||
|
PIP_NO_PYTHON_VERSION_WARNING: true
|
||||||
strategy:
|
strategy:
|
||||||
fail-fast: true
|
fail-fast: true
|
||||||
matrix:
|
matrix:
|
||||||
os: [ubuntu-18.04]
|
os: [ubuntu-20.04]
|
||||||
# TODO: python 2.6
|
python-version: ${{ fromJSON(needs.select.outputs.cpython-versions) }}
|
||||||
python-version: [2.7, 3.3, 3.4, 3.5, 3.6, 3.7, 3.8, 3.9, pypy-2.7, pypy-3.6, pypy-3.7]
|
|
||||||
python-impl: [cpython]
|
python-impl: [cpython]
|
||||||
ytdl-test-set: [core, download]
|
ytdl-test-set: ${{ fromJSON(needs.select.outputs.test-set) }}
|
||||||
run-tests-ext: [sh]
|
run-tests-ext: [sh]
|
||||||
include:
|
include:
|
||||||
# python 3.2 is only available on windows via setup-python
|
- os: windows-2019
|
||||||
- os: windows-latest
|
python-version: 3.4
|
||||||
python-version: 3.2
|
|
||||||
python-impl: cpython
|
python-impl: cpython
|
||||||
ytdl-test-set: core
|
ytdl-test-set: ${{ contains(needs.select.outputs.test-set, 'core') && 'core' || 'nocore' }}
|
||||||
run-tests-ext: bat
|
run-tests-ext: bat
|
||||||
- os: windows-latest
|
- os: windows-2019
|
||||||
python-version: 3.2
|
python-version: 3.4
|
||||||
python-impl: cpython
|
python-impl: cpython
|
||||||
ytdl-test-set: download
|
ytdl-test-set: ${{ contains(needs.select.outputs.test-set, 'download') && 'download' || 'nodownload' }}
|
||||||
run-tests-ext: bat
|
run-tests-ext: bat
|
||||||
# jython
|
# jython
|
||||||
- os: ubuntu-18.04
|
- os: ubuntu-20.04
|
||||||
|
python-version: 2.7
|
||||||
python-impl: jython
|
python-impl: jython
|
||||||
ytdl-test-set: core
|
ytdl-test-set: ${{ contains(needs.select.outputs.test-set, 'core') && 'core' || 'nocore' }}
|
||||||
run-tests-ext: sh
|
run-tests-ext: sh
|
||||||
- os: ubuntu-18.04
|
- os: ubuntu-20.04
|
||||||
|
python-version: 2.7
|
||||||
python-impl: jython
|
python-impl: jython
|
||||||
ytdl-test-set: download
|
ytdl-test-set: ${{ contains(needs.select.outputs.test-set, 'download') && 'download' || 'nodownload' }}
|
||||||
run-tests-ext: sh
|
run-tests-ext: sh
|
||||||
steps:
|
steps:
|
||||||
- uses: actions/checkout@v2
|
- name: Prepare Linux
|
||||||
- name: Set up Python ${{ matrix.python-version }}
|
if: ${{ startswith(matrix.os, 'ubuntu') }}
|
||||||
uses: actions/setup-python@v2
|
shell: bash
|
||||||
if: ${{ matrix.python-impl == 'cpython' }}
|
run: |
|
||||||
|
# apt in runner, if needed, may not be up-to-date
|
||||||
|
sudo apt-get update
|
||||||
|
- name: Checkout
|
||||||
|
uses: actions/checkout@v3
|
||||||
|
#-------- Python 3 -----
|
||||||
|
- name: Set up supported Python ${{ matrix.python-version }}
|
||||||
|
id: setup-python
|
||||||
|
if: ${{ matrix.python-impl == 'cpython' && matrix.python-version != '2.6' && matrix.python-version != '2.7' && matrix.python-version != env.next }}
|
||||||
|
# wrap broken actions/setup-python@v4
|
||||||
|
# NB may run apt-get install in Linux
|
||||||
|
uses: ytdl-org/setup-python@v1
|
||||||
|
env:
|
||||||
|
# Temporary workaround for Python 3.5 failures - May 2024
|
||||||
|
PIP_TRUSTED_HOST: "pypi.python.org pypi.org files.pythonhosted.org"
|
||||||
with:
|
with:
|
||||||
python-version: ${{ matrix.python-version }}
|
python-version: ${{ matrix.python-version }}
|
||||||
|
cache-build: true
|
||||||
|
allow-build: info
|
||||||
|
- name: Locate supported Python ${{ matrix.python-version }}
|
||||||
|
if: ${{ env.pythonLocation }}
|
||||||
|
shell: bash
|
||||||
|
run: |
|
||||||
|
echo "PYTHONHOME=${pythonLocation}" >> "$GITHUB_ENV"
|
||||||
|
export expected="${{ steps.setup-python.outputs.python-path }}"
|
||||||
|
dirname() { printf '%s\n' \
|
||||||
|
'import os, sys' \
|
||||||
|
'print(os.path.dirname(sys.argv[1]))' \
|
||||||
|
| ${expected} - "$1"; }
|
||||||
|
expd="$(dirname "$expected")"
|
||||||
|
export python="$(command -v python)"
|
||||||
|
[ "$expd" = "$(dirname "$python")" ] || echo "PATH=$expd:${PATH}" >> "$GITHUB_ENV"
|
||||||
|
[ -x "$python" ] || printf '%s\n' \
|
||||||
|
'import os' \
|
||||||
|
'exp = os.environ["expected"]' \
|
||||||
|
'python = os.environ["python"]' \
|
||||||
|
'exps = os.path.split(exp)' \
|
||||||
|
'if python and (os.path.dirname(python) == exp[0]):' \
|
||||||
|
' exit(0)' \
|
||||||
|
'exps[1] = "python" + os.path.splitext(exps[1])[1]' \
|
||||||
|
'python = os.path.join(*exps)' \
|
||||||
|
'try:' \
|
||||||
|
' os.symlink(exp, python)' \
|
||||||
|
'except AttributeError:' \
|
||||||
|
' os.rename(exp, python)' \
|
||||||
|
| ${expected} -
|
||||||
|
printf '%s\n' \
|
||||||
|
'import sys' \
|
||||||
|
'print(sys.path)' \
|
||||||
|
| ${expected} -
|
||||||
|
#-------- Python next (was 3.12) -
|
||||||
|
- name: Set up CPython 3.next environment
|
||||||
|
if: ${{ matrix.python-impl == 'cpython' && matrix.python-version == env.next }}
|
||||||
|
shell: bash
|
||||||
|
run: |
|
||||||
|
PYENV_ROOT=$HOME/.local/share/pyenv
|
||||||
|
echo "PYENV_ROOT=${PYENV_ROOT}" >> "$GITHUB_ENV"
|
||||||
|
- name: Cache Python 3.next
|
||||||
|
id: cachenext
|
||||||
|
if: ${{ matrix.python-impl == 'cpython' && matrix.python-version == env.next }}
|
||||||
|
uses: actions/cache@v3
|
||||||
|
with:
|
||||||
|
key: python-${{ env.next }}
|
||||||
|
path: |
|
||||||
|
${{ env.PYENV_ROOT }}
|
||||||
|
- name: Build and set up Python 3.next
|
||||||
|
if: ${{ matrix.python-impl == 'cpython' && matrix.python-version == env.next && ! steps.cachenext.outputs.cache-hit }}
|
||||||
|
# dl and build locally
|
||||||
|
shell: bash
|
||||||
|
run: |
|
||||||
|
# Install build environment
|
||||||
|
sudo apt-get install -y build-essential llvm libssl-dev tk-dev \
|
||||||
|
libncursesw5-dev libreadline-dev libsqlite3-dev \
|
||||||
|
libffi-dev xz-utils zlib1g-dev libbz2-dev liblzma-dev
|
||||||
|
# Download PyEnv from its GitHub repository.
|
||||||
|
export PYENV_ROOT=${{ env.PYENV_ROOT }}
|
||||||
|
export PATH=$PYENV_ROOT/bin:$PATH
|
||||||
|
git clone "https://github.com/pyenv/pyenv.git" "$PYENV_ROOT"
|
||||||
|
pyenv install ${{ env.next }}
|
||||||
|
- name: Locate Python 3.next
|
||||||
|
if: ${{ matrix.python-impl == 'cpython' && matrix.python-version == env.next }}
|
||||||
|
shell: bash
|
||||||
|
run: |
|
||||||
|
PYTHONHOME="$(echo "${{ env.PYENV_ROOT }}/versions/${{ env.next }}."*)"
|
||||||
|
test -n "$PYTHONHOME"
|
||||||
|
echo "PYTHONHOME=$PYTHONHOME" >> "$GITHUB_ENV"
|
||||||
|
echo "PATH=${PYTHONHOME}/bin:$PATH" >> "$GITHUB_ENV"
|
||||||
|
#-------- Python 2.7 --
|
||||||
|
- name: Set up Python 2.7
|
||||||
|
if: ${{ matrix.python-impl == 'cpython' && matrix.python-version == '2.7' }}
|
||||||
|
# install 2.7
|
||||||
|
shell: bash
|
||||||
|
run: |
|
||||||
|
sudo apt-get install -y python2 python-is-python2
|
||||||
|
echo "PYTHONHOME=/usr" >> "$GITHUB_ENV"
|
||||||
|
#-------- Python 2.6 --
|
||||||
|
- name: Set up Python 2.6 environment
|
||||||
|
if: ${{ matrix.python-impl == 'cpython' && matrix.python-version == '2.6' }}
|
||||||
|
shell: bash
|
||||||
|
run: |
|
||||||
|
openssl_name=openssl-1.0.2u
|
||||||
|
echo "openssl_name=${openssl_name}" >> "$GITHUB_ENV"
|
||||||
|
openssl_dir=$HOME/.local/opt/$openssl_name
|
||||||
|
echo "openssl_dir=${openssl_dir}" >> "$GITHUB_ENV"
|
||||||
|
PYENV_ROOT=$HOME/.local/share/pyenv
|
||||||
|
echo "PYENV_ROOT=${PYENV_ROOT}" >> "$GITHUB_ENV"
|
||||||
|
sudo apt-get install -y openssl ca-certificates
|
||||||
|
- name: Cache Python 2.6
|
||||||
|
id: cache26
|
||||||
|
if: ${{ matrix.python-version == '2.6' }}
|
||||||
|
uses: actions/cache@v3
|
||||||
|
with:
|
||||||
|
key: python-2.6.9
|
||||||
|
path: |
|
||||||
|
${{ env.openssl_dir }}
|
||||||
|
${{ env.PYENV_ROOT }}
|
||||||
|
- name: Build and set up Python 2.6
|
||||||
|
if: ${{ matrix.python-impl == 'cpython' && matrix.python-version == '2.6' && ! steps.cache26.outputs.cache-hit }}
|
||||||
|
# dl and build locally
|
||||||
|
shell: bash
|
||||||
|
run: |
|
||||||
|
# Install build environment
|
||||||
|
sudo apt-get install -y build-essential llvm libssl-dev tk-dev \
|
||||||
|
libncursesw5-dev libreadline-dev libsqlite3-dev \
|
||||||
|
libffi-dev xz-utils zlib1g-dev libbz2-dev liblzma-dev
|
||||||
|
# Download and install OpenSSL 1.0.2, back in time
|
||||||
|
openssl_name=${{ env.openssl_name }}
|
||||||
|
openssl_targz=${openssl_name}.tar.gz
|
||||||
|
openssl_dir=${{ env.openssl_dir }}
|
||||||
|
openssl_inc=$openssl_dir/include
|
||||||
|
openssl_lib=$openssl_dir/lib
|
||||||
|
openssl_ssl=$openssl_dir/ssl
|
||||||
|
curl -L "https://www.openssl.org/source/$openssl_targz" -o $openssl_targz
|
||||||
|
tar -xf $openssl_targz
|
||||||
|
( cd $openssl_name; \
|
||||||
|
./config --prefix=$openssl_dir --openssldir=${openssl_dir}/ssl \
|
||||||
|
--libdir=lib -Wl,-rpath=${openssl_dir}/lib shared zlib-dynamic && \
|
||||||
|
make && \
|
||||||
|
make install )
|
||||||
|
rm -rf $openssl_name
|
||||||
|
rmdir $openssl_ssl/certs && ln -s /etc/ssl/certs $openssl_ssl/certs
|
||||||
|
# Download PyEnv from its GitHub repository.
|
||||||
|
export PYENV_ROOT=${{ env.PYENV_ROOT }}
|
||||||
|
export PATH=$PYENV_ROOT/bin:$PATH
|
||||||
|
git clone "https://github.com/pyenv/pyenv.git" "$PYENV_ROOT"
|
||||||
|
# Prevent pyenv build trying (and failing) to update pip
|
||||||
|
export GET_PIP=get-pip-2.6.py
|
||||||
|
echo 'import sys; sys.exit(0)' > ${GET_PIP}
|
||||||
|
GET_PIP=$(realpath $GET_PIP)
|
||||||
|
# Build and install Python
|
||||||
|
export CFLAGS="-I$openssl_inc"
|
||||||
|
export LDFLAGS="-L$openssl_lib"
|
||||||
|
export LD_LIBRARY_PATH="$openssl_lib"
|
||||||
|
pyenv install 2.6.9
|
||||||
|
- name: Locate Python 2.6
|
||||||
|
if: ${{ matrix.python-impl == 'cpython' && matrix.python-version == '2.6' }}
|
||||||
|
shell: bash
|
||||||
|
run: |
|
||||||
|
PYTHONHOME="${{ env.PYENV_ROOT }}/versions/2.6.9"
|
||||||
|
echo "PYTHONHOME=$PYTHONHOME" >> "$GITHUB_ENV"
|
||||||
|
echo "PATH=${PYTHONHOME}/bin:$PATH" >> "$GITHUB_ENV"
|
||||||
|
echo "LD_LIBRARY_PATH=${{ env.openssl_dir }}/lib${LD_LIBRARY_PATH:+:}${LD_LIBRARY_PATH}" >> "$GITHUB_ENV"
|
||||||
|
#-------- Jython ------
|
||||||
- name: Set up Java 8
|
- name: Set up Java 8
|
||||||
if: ${{ matrix.python-impl == 'jython' }}
|
if: ${{ matrix.python-impl == 'jython' }}
|
||||||
uses: actions/setup-java@v1
|
uses: actions/setup-java@v3
|
||||||
with:
|
with:
|
||||||
java-version: 8
|
java-version: 8
|
||||||
- name: Install Jython
|
distribution: 'zulu'
|
||||||
|
- name: Setup Jython environment
|
||||||
if: ${{ matrix.python-impl == 'jython' }}
|
if: ${{ matrix.python-impl == 'jython' }}
|
||||||
|
shell: bash
|
||||||
|
run: |
|
||||||
|
echo "JYTHON_ROOT=${HOME}/jython" >> "$GITHUB_ENV"
|
||||||
|
echo "PIP=pip" >> "$GITHUB_ENV"
|
||||||
|
- name: Cache Jython
|
||||||
|
id: cachejy
|
||||||
|
if: ${{ matrix.python-impl == 'jython' && matrix.python-version == '2.7' }}
|
||||||
|
uses: actions/cache@v3
|
||||||
|
with:
|
||||||
|
# 2.7.3 now available, may solve SNI issue
|
||||||
|
key: jython-2.7.1
|
||||||
|
path: |
|
||||||
|
${{ env.JYTHON_ROOT }}
|
||||||
|
- name: Install Jython
|
||||||
|
if: ${{ matrix.python-impl == 'jython' && matrix.python-version == '2.7' && ! steps.cachejy.outputs.cache-hit }}
|
||||||
|
shell: bash
|
||||||
|
run: |
|
||||||
|
JYTHON_ROOT="${{ env.JYTHON_ROOT }}"
|
||||||
|
curl -L "https://repo1.maven.org/maven2/org/python/jython-installer/2.7.1/jython-installer-2.7.1.jar" -o jython-installer.jar
|
||||||
|
java -jar jython-installer.jar -s -d "${JYTHON_ROOT}"
|
||||||
|
echo "${JYTHON_ROOT}/bin" >> "$GITHUB_PATH"
|
||||||
|
- name: Set up cached Jython
|
||||||
|
if: ${{ steps.cachejy.outputs.cache-hit }}
|
||||||
|
shell: bash
|
||||||
|
run: |
|
||||||
|
JYTHON_ROOT="${{ env.JYTHON_ROOT }}"
|
||||||
|
echo "${JYTHON_ROOT}/bin" >> $GITHUB_PATH
|
||||||
|
- name: Install supporting Python 2.7 if possible
|
||||||
|
if: ${{ steps.cachejy.outputs.cache-hit }}
|
||||||
|
shell: bash
|
||||||
|
run: |
|
||||||
|
sudo apt-get install -y python2.7 || true
|
||||||
|
#-------- pip ---------
|
||||||
|
- name: Set up supported Python ${{ matrix.python-version }} pip
|
||||||
|
if: ${{ (matrix.python-version != '3.2' && steps.setup-python.outputs.python-path) || matrix.python-version == '2.7' }}
|
||||||
|
# This step may run in either Linux or Windows
|
||||||
|
shell: bash
|
||||||
run: |
|
run: |
|
||||||
wget http://search.maven.org/remotecontent?filepath=org/python/jython-installer/2.7.1/jython-installer-2.7.1.jar -O jython-installer.jar
|
echo "$PATH"
|
||||||
java -jar jython-installer.jar -s -d "$HOME/jython"
|
echo "$PYTHONHOME"
|
||||||
echo "$HOME/jython/bin" >> $GITHUB_PATH
|
# curl is available on both Windows and Linux, -L follows redirects, -O gets name
|
||||||
- name: Install nose
|
python -m ensurepip || python -m pip --version || { \
|
||||||
run: pip install nose
|
get_pip="${{ contains(needs.select.outputs.own-pip-versions, matrix.python-version) && format('{0}/', matrix.python-version) || '' }}"; \
|
||||||
|
curl -L -O "https://bootstrap.pypa.io/pip/${get_pip}get-pip.py"; \
|
||||||
|
python get-pip.py; }
|
||||||
|
- name: Set up Python 2.6 pip
|
||||||
|
if: ${{ matrix.python-version == '2.6' }}
|
||||||
|
shell: bash
|
||||||
|
run: |
|
||||||
|
python -m pip --version || { \
|
||||||
|
curl -L -O "https://bootstrap.pypa.io/pip/2.6/get-pip.py"; \
|
||||||
|
curl -L -O "https://files.pythonhosted.org/packages/ac/95/a05b56bb975efa78d3557efa36acaf9cf5d2fd0ee0062060493687432e03/pip-9.0.3-py2.py3-none-any.whl"; \
|
||||||
|
python get-pip.py --no-setuptools --no-wheel pip-9.0.3-py2.py3-none-any.whl; }
|
||||||
|
# work-around to invoke pip module on 2.6: https://bugs.python.org/issue2751
|
||||||
|
echo "PIP=python -m pip.__main__" >> "$GITHUB_ENV"
|
||||||
|
- name: Set up other Python ${{ matrix.python-version }} pip
|
||||||
|
if: ${{ matrix.python-version == '3.2' && steps.setup-python.outputs.python-path }}
|
||||||
|
shell: bash
|
||||||
|
run: |
|
||||||
|
python -m pip --version || { \
|
||||||
|
curl -L -O "https://bootstrap.pypa.io/pip/3.2/get-pip.py"; \
|
||||||
|
curl -L -O "https://files.pythonhosted.org/packages/b2/d0/cd115fe345dd6f07ec1c780020a7dfe74966fceeb171e0f20d1d4905b0b7/pip-7.1.2-py2.py3-none-any.whl"; \
|
||||||
|
python get-pip.py --no-setuptools --no-wheel pip-7.1.2-py2.py3-none-any.whl; }
|
||||||
|
#-------- unittest ----
|
||||||
|
- name: Upgrade Unittest for Python 2.6
|
||||||
|
if: ${{ matrix.python-version == '2.6' }}
|
||||||
|
shell: bash
|
||||||
|
run: |
|
||||||
|
# Work around deprecation of support for non-SNI clients at PyPI CDN (see https://status.python.org/incidents/hzmjhqsdjqgb)
|
||||||
|
$PIP -qq show unittest2 || { \
|
||||||
|
for u in "65/26/32b8464df2a97e6dd1b656ed26b2c194606c16fe163c695a992b36c11cdf/six-1.13.0-py2.py3-none-any.whl" \
|
||||||
|
"f2/94/3af39d34be01a24a6e65433d19e107099374224905f1e0cc6bbe1fd22a2f/argparse-1.4.0-py2.py3-none-any.whl" \
|
||||||
|
"c7/a3/c5da2a44c85bfbb6eebcfc1dde24933f8704441b98fdde6528f4831757a6/linecache2-1.0.0-py2.py3-none-any.whl" \
|
||||||
|
"17/0a/6ac05a3723017a967193456a2efa0aa9ac4b51456891af1e2353bb9de21e/traceback2-1.4.0-py2.py3-none-any.whl" \
|
||||||
|
"72/20/7f0f433060a962200b7272b8c12ba90ef5b903e218174301d0abfd523813/unittest2-1.1.0-py2.py3-none-any.whl"; do \
|
||||||
|
curl -L -O "https://files.pythonhosted.org/packages/${u}"; \
|
||||||
|
$PIP install ${u##*/}; \
|
||||||
|
done; }
|
||||||
|
# make tests use unittest2
|
||||||
|
for test in ./test/test_*.py ./test/helper.py; do
|
||||||
|
sed -r -i -e '/^import unittest$/s/test/test2 as unittest/' "$test"
|
||||||
|
done
|
||||||
|
#-------- nose --------
|
||||||
|
- name: Install nose for Python ${{ matrix.python-version }}
|
||||||
|
if: ${{ (matrix.python-version != '3.2' && steps.setup-python.outputs.python-path) || (matrix.python-impl == 'cpython' && (matrix.python-version == '2.7' || matrix.python-version == env.next)) }}
|
||||||
|
shell: bash
|
||||||
|
run: |
|
||||||
|
echo "$PATH"
|
||||||
|
echo "$PYTHONHOME"
|
||||||
|
# Use PyNose for recent Pythons instead of Nose
|
||||||
|
py3ver="${{ matrix.python-version }}"
|
||||||
|
py3ver=${py3ver#3.}
|
||||||
|
[ "$py3ver" != "${{ matrix.python-version }}" ] && py3ver=${py3ver%.*} || py3ver=0
|
||||||
|
[ "$py3ver" -ge 9 ] && nose=pynose || nose=nose
|
||||||
|
$PIP -qq show $nose || $PIP install $nose
|
||||||
|
- name: Install nose for other Python 2
|
||||||
|
if: ${{ matrix.python-impl == 'jython' || (matrix.python-impl == 'cpython' && matrix.python-version == '2.6') }}
|
||||||
|
shell: bash
|
||||||
|
run: |
|
||||||
|
# Work around deprecation of support for non-SNI clients at PyPI CDN (see https://status.python.org/incidents/hzmjhqsdjqgb)
|
||||||
|
$PIP -qq show nose || { \
|
||||||
|
curl -L -O "https://files.pythonhosted.org/packages/99/4f/13fb671119e65c4dce97c60e67d3fd9e6f7f809f2b307e2611f4701205cb/nose-1.3.7-py2-none-any.whl"; \
|
||||||
|
$PIP install nose-1.3.7-py2-none-any.whl; }
|
||||||
|
- name: Install nose for other Python 3
|
||||||
|
if: ${{ matrix.python-version == '3.2' && steps.setup-python.outputs.python-path }}
|
||||||
|
shell: bash
|
||||||
|
run: |
|
||||||
|
$PIP -qq show nose || { \
|
||||||
|
curl -L -O "https://files.pythonhosted.org/packages/15/d8/dd071918c040f50fa1cf80da16423af51ff8ce4a0f2399b7bf8de45ac3d9/nose-1.3.7-py3-none-any.whl"; \
|
||||||
|
$PIP install nose-1.3.7-py3-none-any.whl; }
|
||||||
|
- name: Set up nosetest test
|
||||||
|
if: ${{ contains(needs.select.outputs.test-set, matrix.ytdl-test-set ) }}
|
||||||
|
shell: bash
|
||||||
|
run: |
|
||||||
|
# set PYTHON_VER
|
||||||
|
PYTHON_VER=${{ matrix.python-version }}
|
||||||
|
[ "${PYTHON_VER#*-}" != "$PYTHON_VER" ] || PYTHON_VER="${{ matrix.python-impl }}-${PYTHON_VER}"
|
||||||
|
echo "PYTHON_VER=$PYTHON_VER" >> "$GITHUB_ENV"
|
||||||
|
echo "PYTHON_IMPL=${{ matrix.python-impl }}" >> "$GITHUB_ENV"
|
||||||
|
# define a test to validate the Python version used by nosetests
|
||||||
|
printf '%s\n' \
|
||||||
|
'from __future__ import unicode_literals' \
|
||||||
|
'import sys, os, platform' \
|
||||||
|
'try:' \
|
||||||
|
' import unittest2 as unittest' \
|
||||||
|
'except ImportError:' \
|
||||||
|
' import unittest' \
|
||||||
|
'class TestPython(unittest.TestCase):' \
|
||||||
|
' def setUp(self):' \
|
||||||
|
' self.ver = os.environ["PYTHON_VER"].split("-")' \
|
||||||
|
' def test_python_ver(self):' \
|
||||||
|
' self.assertEqual(["%d" % v for v in sys.version_info[:2]], self.ver[-1].split(".")[:2])' \
|
||||||
|
' self.assertTrue(sys.version.startswith(self.ver[-1]))' \
|
||||||
|
' self.assertIn(self.ver[0], ",".join((sys.version, platform.python_implementation())).lower())' \
|
||||||
|
' def test_python_impl(self):' \
|
||||||
|
' self.assertIn(platform.python_implementation().lower(), (os.environ["PYTHON_IMPL"], self.ver[0]))' \
|
||||||
|
> test/test_python.py
|
||||||
|
#-------- TESTS -------
|
||||||
- name: Run tests
|
- name: Run tests
|
||||||
|
if: ${{ contains(needs.select.outputs.test-set, matrix.ytdl-test-set ) }}
|
||||||
continue-on-error: ${{ matrix.ytdl-test-set == 'download' || matrix.python-impl == 'jython' }}
|
continue-on-error: ${{ matrix.ytdl-test-set == 'download' || matrix.python-impl == 'jython' }}
|
||||||
env:
|
env:
|
||||||
YTDL_TEST_SET: ${{ matrix.ytdl-test-set }}
|
YTDL_TEST_SET: ${{ matrix.ytdl-test-set }}
|
||||||
run: ./devscripts/run_tests.${{ matrix.run-tests-ext }}
|
run: |
|
||||||
|
./devscripts/run_tests.${{ matrix.run-tests-ext }}
|
||||||
flake8:
|
flake8:
|
||||||
name: Linter
|
name: Linter
|
||||||
runs-on: ubuntu-latest
|
runs-on: ubuntu-latest
|
||||||
steps:
|
steps:
|
||||||
- uses: actions/checkout@v2
|
- uses: actions/checkout@v3
|
||||||
- name: Set up Python
|
- name: Set up Python
|
||||||
uses: actions/setup-python@v2
|
uses: actions/setup-python@v4
|
||||||
with:
|
with:
|
||||||
python-version: 3.9
|
python-version: 3.9
|
||||||
- name: Install flake8
|
- name: Install flake8
|
||||||
run: pip install flake8
|
run: pip install flake8
|
||||||
- name: Run flake8
|
- name: Run flake8
|
||||||
run: flake8 .
|
run: flake8 .
|
||||||
|
|
||||||
|
@ -0,0 +1 @@
|
|||||||
|
# Empty file needed to make devscripts.utils properly importable from outside
|
@ -0,0 +1,83 @@
|
|||||||
|
#!/usr/bin/env python
|
||||||
|
# coding: utf-8
|
||||||
|
|
||||||
|
from __future__ import unicode_literals
|
||||||
|
|
||||||
|
"""
|
||||||
|
This script displays the API parameters corresponding to a yt-dl command line
|
||||||
|
|
||||||
|
Example:
|
||||||
|
$ ./cli_to_api.py -f best
|
||||||
|
{u'format': 'best'}
|
||||||
|
$
|
||||||
|
"""
|
||||||
|
|
||||||
|
# Allow direct execution
|
||||||
|
import os
|
||||||
|
import sys
|
||||||
|
sys.path.insert(0, os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
|
||||||
|
|
||||||
|
import youtube_dl
|
||||||
|
from types import MethodType
|
||||||
|
|
||||||
|
|
||||||
|
def cli_to_api(*opts):
|
||||||
|
YDL = youtube_dl.YoutubeDL
|
||||||
|
|
||||||
|
# to extract the parsed options, break out of YoutubeDL instantiation
|
||||||
|
|
||||||
|
# return options via this Exception
|
||||||
|
class ParseYTDLResult(Exception):
|
||||||
|
def __init__(self, result):
|
||||||
|
super(ParseYTDLResult, self).__init__('result')
|
||||||
|
self.opts = result
|
||||||
|
|
||||||
|
# replacement constructor that raises ParseYTDLResult
|
||||||
|
def ytdl_init(ydl, ydl_opts):
|
||||||
|
super(YDL, ydl).__init__(ydl_opts)
|
||||||
|
raise ParseYTDLResult(ydl_opts)
|
||||||
|
|
||||||
|
# patch in the constructor
|
||||||
|
YDL.__init__ = MethodType(ytdl_init, YDL)
|
||||||
|
|
||||||
|
# core parser
|
||||||
|
def parsed_options(argv):
|
||||||
|
try:
|
||||||
|
youtube_dl._real_main(list(argv))
|
||||||
|
except ParseYTDLResult as result:
|
||||||
|
return result.opts
|
||||||
|
|
||||||
|
# from https://github.com/yt-dlp/yt-dlp/issues/5859#issuecomment-1363938900
|
||||||
|
default = parsed_options([])
|
||||||
|
|
||||||
|
def neq_opt(a, b):
|
||||||
|
if a == b:
|
||||||
|
return False
|
||||||
|
if a is None and repr(type(object)).endswith(".utils.DateRange'>"):
|
||||||
|
return '0001-01-01 - 9999-12-31' != '{0}'.format(b)
|
||||||
|
return a != b
|
||||||
|
|
||||||
|
diff = dict((k, v) for k, v in parsed_options(opts).items() if neq_opt(default[k], v))
|
||||||
|
if 'postprocessors' in diff:
|
||||||
|
diff['postprocessors'] = [pp for pp in diff['postprocessors'] if pp not in default['postprocessors']]
|
||||||
|
return diff
|
||||||
|
|
||||||
|
|
||||||
|
def main():
|
||||||
|
from pprint import PrettyPrinter
|
||||||
|
|
||||||
|
pprint = PrettyPrinter()
|
||||||
|
super_format = pprint.format
|
||||||
|
|
||||||
|
def format(object, context, maxlevels, level):
|
||||||
|
if repr(type(object)).endswith(".utils.DateRange'>"):
|
||||||
|
return '{0}: {1}>'.format(repr(object)[:-2], object), True, False
|
||||||
|
return super_format(object, context, maxlevels, level)
|
||||||
|
|
||||||
|
pprint.format = format
|
||||||
|
|
||||||
|
pprint.pprint(cli_to_api(*sys.argv))
|
||||||
|
|
||||||
|
|
||||||
|
if __name__ == '__main__':
|
||||||
|
main()
|
@ -0,0 +1,62 @@
|
|||||||
|
# coding: utf-8
|
||||||
|
from __future__ import unicode_literals
|
||||||
|
|
||||||
|
import argparse
|
||||||
|
import functools
|
||||||
|
import os.path
|
||||||
|
import subprocess
|
||||||
|
import sys
|
||||||
|
|
||||||
|
dirn = os.path.dirname
|
||||||
|
|
||||||
|
sys.path.insert(0, dirn(dirn(os.path.abspath(__file__))))
|
||||||
|
|
||||||
|
from youtube_dl.compat import (
|
||||||
|
compat_kwargs,
|
||||||
|
compat_open as open,
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
def read_file(fname):
|
||||||
|
with open(fname, encoding='utf-8') as f:
|
||||||
|
return f.read()
|
||||||
|
|
||||||
|
|
||||||
|
def write_file(fname, content, mode='w'):
|
||||||
|
with open(fname, mode, encoding='utf-8') as f:
|
||||||
|
return f.write(content)
|
||||||
|
|
||||||
|
|
||||||
|
def read_version(fname='youtube_dl/version.py'):
|
||||||
|
"""Get the version without importing the package"""
|
||||||
|
exec(compile(read_file(fname), fname, 'exec'))
|
||||||
|
return locals()['__version__']
|
||||||
|
|
||||||
|
|
||||||
|
def get_filename_args(has_infile=False, default_outfile=None):
|
||||||
|
parser = argparse.ArgumentParser()
|
||||||
|
if has_infile:
|
||||||
|
parser.add_argument('infile', help='Input file')
|
||||||
|
kwargs = {'nargs': '?', 'default': default_outfile} if default_outfile else {}
|
||||||
|
kwargs['help'] = 'Output file'
|
||||||
|
parser.add_argument('outfile', **compat_kwargs(kwargs))
|
||||||
|
|
||||||
|
opts = parser.parse_args()
|
||||||
|
if has_infile:
|
||||||
|
return opts.infile, opts.outfile
|
||||||
|
return opts.outfile
|
||||||
|
|
||||||
|
|
||||||
|
def compose_functions(*functions):
|
||||||
|
return lambda x: functools.reduce(lambda y, f: f(y), functions, x)
|
||||||
|
|
||||||
|
|
||||||
|
def run_process(*args, **kwargs):
|
||||||
|
kwargs.setdefault('text', True)
|
||||||
|
kwargs.setdefault('check', True)
|
||||||
|
kwargs.setdefault('capture_output', True)
|
||||||
|
if kwargs['text']:
|
||||||
|
kwargs.setdefault('encoding', 'utf-8')
|
||||||
|
kwargs.setdefault('errors', 'replace')
|
||||||
|
kwargs = compat_kwargs(kwargs)
|
||||||
|
return subprocess.run(args, **kwargs)
|
@ -0,0 +1,272 @@
|
|||||||
|
#!/usr/bin/env python
|
||||||
|
# coding: utf-8
|
||||||
|
from __future__ import unicode_literals
|
||||||
|
|
||||||
|
# Allow direct execution
|
||||||
|
import os
|
||||||
|
import re
|
||||||
|
import sys
|
||||||
|
import subprocess
|
||||||
|
import unittest
|
||||||
|
sys.path.insert(0, os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
|
||||||
|
|
||||||
|
from test.helper import (
|
||||||
|
FakeLogger,
|
||||||
|
FakeYDL,
|
||||||
|
http_server_port,
|
||||||
|
try_rm,
|
||||||
|
)
|
||||||
|
from youtube_dl import YoutubeDL
|
||||||
|
from youtube_dl.compat import (
|
||||||
|
compat_contextlib_suppress,
|
||||||
|
compat_http_cookiejar_Cookie,
|
||||||
|
compat_http_server,
|
||||||
|
compat_kwargs,
|
||||||
|
)
|
||||||
|
from youtube_dl.utils import (
|
||||||
|
encodeFilename,
|
||||||
|
join_nonempty,
|
||||||
|
)
|
||||||
|
from youtube_dl.downloader.external import (
|
||||||
|
Aria2cFD,
|
||||||
|
Aria2pFD,
|
||||||
|
AxelFD,
|
||||||
|
CurlFD,
|
||||||
|
FFmpegFD,
|
||||||
|
HttpieFD,
|
||||||
|
WgetFD,
|
||||||
|
)
|
||||||
|
from youtube_dl.postprocessor import (
|
||||||
|
FFmpegPostProcessor,
|
||||||
|
)
|
||||||
|
import threading
|
||||||
|
|
||||||
|
TEST_SIZE = 10 * 1024
|
||||||
|
|
||||||
|
TEST_COOKIE = {
|
||||||
|
'version': 0,
|
||||||
|
'name': 'test',
|
||||||
|
'value': 'ytdlp',
|
||||||
|
'port': None,
|
||||||
|
'port_specified': False,
|
||||||
|
'domain': '.example.com',
|
||||||
|
'domain_specified': True,
|
||||||
|
'domain_initial_dot': False,
|
||||||
|
'path': '/',
|
||||||
|
'path_specified': True,
|
||||||
|
'secure': False,
|
||||||
|
'expires': None,
|
||||||
|
'discard': False,
|
||||||
|
'comment': None,
|
||||||
|
'comment_url': None,
|
||||||
|
'rest': {},
|
||||||
|
}
|
||||||
|
|
||||||
|
TEST_COOKIE_VALUE = join_nonempty('name', 'value', delim='=', from_dict=TEST_COOKIE)
|
||||||
|
|
||||||
|
TEST_INFO = {'url': 'http://www.example.com/'}
|
||||||
|
|
||||||
|
|
||||||
|
def cookiejar_Cookie(**cookie_args):
|
||||||
|
return compat_http_cookiejar_Cookie(**compat_kwargs(cookie_args))
|
||||||
|
|
||||||
|
|
||||||
|
def ifExternalFDAvailable(externalFD):
|
||||||
|
return unittest.skipUnless(externalFD.available(),
|
||||||
|
externalFD.get_basename() + ' not found')
|
||||||
|
|
||||||
|
|
||||||
|
class HTTPTestRequestHandler(compat_http_server.BaseHTTPRequestHandler):
|
||||||
|
def log_message(self, format, *args):
|
||||||
|
pass
|
||||||
|
|
||||||
|
def send_content_range(self, total=None):
|
||||||
|
range_header = self.headers.get('Range')
|
||||||
|
start = end = None
|
||||||
|
if range_header:
|
||||||
|
mobj = re.match(r'bytes=(\d+)-(\d+)', range_header)
|
||||||
|
if mobj:
|
||||||
|
start, end = (int(mobj.group(i)) for i in (1, 2))
|
||||||
|
valid_range = start is not None and end is not None
|
||||||
|
if valid_range:
|
||||||
|
content_range = 'bytes %d-%d' % (start, end)
|
||||||
|
if total:
|
||||||
|
content_range += '/%d' % total
|
||||||
|
self.send_header('Content-Range', content_range)
|
||||||
|
return (end - start + 1) if valid_range else total
|
||||||
|
|
||||||
|
def serve(self, range=True, content_length=True):
|
||||||
|
self.send_response(200)
|
||||||
|
self.send_header('Content-Type', 'video/mp4')
|
||||||
|
size = TEST_SIZE
|
||||||
|
if range:
|
||||||
|
size = self.send_content_range(TEST_SIZE)
|
||||||
|
if content_length:
|
||||||
|
self.send_header('Content-Length', size)
|
||||||
|
self.end_headers()
|
||||||
|
self.wfile.write(b'#' * size)
|
||||||
|
|
||||||
|
def do_GET(self):
|
||||||
|
if self.path == '/regular':
|
||||||
|
self.serve()
|
||||||
|
elif self.path == '/no-content-length':
|
||||||
|
self.serve(content_length=False)
|
||||||
|
elif self.path == '/no-range':
|
||||||
|
self.serve(range=False)
|
||||||
|
elif self.path == '/no-range-no-content-length':
|
||||||
|
self.serve(range=False, content_length=False)
|
||||||
|
else:
|
||||||
|
assert False, 'unrecognised server path'
|
||||||
|
|
||||||
|
|
||||||
|
@ifExternalFDAvailable(Aria2pFD)
|
||||||
|
class TestAria2pFD(unittest.TestCase):
|
||||||
|
def setUp(self):
|
||||||
|
self.httpd = compat_http_server.HTTPServer(
|
||||||
|
('127.0.0.1', 0), HTTPTestRequestHandler)
|
||||||
|
self.port = http_server_port(self.httpd)
|
||||||
|
self.server_thread = threading.Thread(target=self.httpd.serve_forever)
|
||||||
|
self.server_thread.daemon = True
|
||||||
|
self.server_thread.start()
|
||||||
|
|
||||||
|
def download(self, params, ep):
|
||||||
|
with subprocess.Popen(
|
||||||
|
['aria2c', '--enable-rpc'],
|
||||||
|
stdout=subprocess.DEVNULL,
|
||||||
|
stderr=subprocess.DEVNULL
|
||||||
|
) as process:
|
||||||
|
if not process.poll():
|
||||||
|
filename = 'testfile.mp4'
|
||||||
|
params['logger'] = FakeLogger()
|
||||||
|
params['outtmpl'] = filename
|
||||||
|
ydl = YoutubeDL(params)
|
||||||
|
try_rm(encodeFilename(filename))
|
||||||
|
self.assertEqual(ydl.download(['http://127.0.0.1:%d/%s' % (self.port, ep)]), 0)
|
||||||
|
self.assertEqual(os.path.getsize(encodeFilename(filename)), TEST_SIZE)
|
||||||
|
try_rm(encodeFilename(filename))
|
||||||
|
process.kill()
|
||||||
|
|
||||||
|
def download_all(self, params):
|
||||||
|
for ep in ('regular', 'no-content-length', 'no-range', 'no-range-no-content-length'):
|
||||||
|
self.download(params, ep)
|
||||||
|
|
||||||
|
def test_regular(self):
|
||||||
|
self.download_all({'external_downloader': 'aria2p'})
|
||||||
|
|
||||||
|
def test_chunked(self):
|
||||||
|
self.download_all({
|
||||||
|
'external_downloader': 'aria2p',
|
||||||
|
'http_chunk_size': 1000,
|
||||||
|
})
|
||||||
|
|
||||||
|
|
||||||
|
@ifExternalFDAvailable(HttpieFD)
|
||||||
|
class TestHttpieFD(unittest.TestCase):
|
||||||
|
def test_make_cmd(self):
|
||||||
|
with FakeYDL() as ydl:
|
||||||
|
downloader = HttpieFD(ydl, {})
|
||||||
|
self.assertEqual(
|
||||||
|
downloader._make_cmd('test', TEST_INFO),
|
||||||
|
['http', '--download', '--output', 'test', 'http://www.example.com/'])
|
||||||
|
|
||||||
|
# Test cookie header is added
|
||||||
|
ydl.cookiejar.set_cookie(cookiejar_Cookie(**TEST_COOKIE))
|
||||||
|
self.assertEqual(
|
||||||
|
downloader._make_cmd('test', TEST_INFO),
|
||||||
|
['http', '--download', '--output', 'test',
|
||||||
|
'http://www.example.com/', 'Cookie:' + TEST_COOKIE_VALUE])
|
||||||
|
|
||||||
|
|
||||||
|
@ifExternalFDAvailable(AxelFD)
|
||||||
|
class TestAxelFD(unittest.TestCase):
|
||||||
|
def test_make_cmd(self):
|
||||||
|
with FakeYDL() as ydl:
|
||||||
|
downloader = AxelFD(ydl, {})
|
||||||
|
self.assertEqual(
|
||||||
|
downloader._make_cmd('test', TEST_INFO),
|
||||||
|
['axel', '-o', 'test', '--', 'http://www.example.com/'])
|
||||||
|
|
||||||
|
# Test cookie header is added
|
||||||
|
ydl.cookiejar.set_cookie(cookiejar_Cookie(**TEST_COOKIE))
|
||||||
|
self.assertEqual(
|
||||||
|
downloader._make_cmd('test', TEST_INFO),
|
||||||
|
['axel', '-o', 'test', '-H', 'Cookie: ' + TEST_COOKIE_VALUE,
|
||||||
|
'--max-redirect=0', '--', 'http://www.example.com/'])
|
||||||
|
|
||||||
|
|
||||||
|
@ifExternalFDAvailable(WgetFD)
|
||||||
|
class TestWgetFD(unittest.TestCase):
|
||||||
|
def test_make_cmd(self):
|
||||||
|
with FakeYDL() as ydl:
|
||||||
|
downloader = WgetFD(ydl, {})
|
||||||
|
self.assertNotIn('--load-cookies', downloader._make_cmd('test', TEST_INFO))
|
||||||
|
# Test cookiejar tempfile arg is added
|
||||||
|
ydl.cookiejar.set_cookie(cookiejar_Cookie(**TEST_COOKIE))
|
||||||
|
self.assertIn('--load-cookies', downloader._make_cmd('test', TEST_INFO))
|
||||||
|
|
||||||
|
|
||||||
|
@ifExternalFDAvailable(CurlFD)
|
||||||
|
class TestCurlFD(unittest.TestCase):
|
||||||
|
def test_make_cmd(self):
|
||||||
|
with FakeYDL() as ydl:
|
||||||
|
downloader = CurlFD(ydl, {})
|
||||||
|
self.assertNotIn('--cookie', downloader._make_cmd('test', TEST_INFO))
|
||||||
|
# Test cookie header is added
|
||||||
|
ydl.cookiejar.set_cookie(cookiejar_Cookie(**TEST_COOKIE))
|
||||||
|
self.assertIn('--cookie', downloader._make_cmd('test', TEST_INFO))
|
||||||
|
self.assertIn(TEST_COOKIE_VALUE, downloader._make_cmd('test', TEST_INFO))
|
||||||
|
|
||||||
|
|
||||||
|
@ifExternalFDAvailable(Aria2cFD)
|
||||||
|
class TestAria2cFD(unittest.TestCase):
|
||||||
|
def test_make_cmd(self):
|
||||||
|
with FakeYDL() as ydl:
|
||||||
|
downloader = Aria2cFD(ydl, {})
|
||||||
|
downloader._make_cmd('test', TEST_INFO)
|
||||||
|
self.assertFalse(hasattr(downloader, '_cookies_tempfile'))
|
||||||
|
|
||||||
|
# Test cookiejar tempfile arg is added
|
||||||
|
ydl.cookiejar.set_cookie(cookiejar_Cookie(**TEST_COOKIE))
|
||||||
|
cmd = downloader._make_cmd('test', TEST_INFO)
|
||||||
|
self.assertIn('--load-cookies=%s' % downloader._cookies_tempfile, cmd)
|
||||||
|
|
||||||
|
|
||||||
|
# Handle delegated availability
|
||||||
|
def ifFFmpegFDAvailable(externalFD):
|
||||||
|
# raise SkipTest, or set False!
|
||||||
|
avail = ifExternalFDAvailable(externalFD) and False
|
||||||
|
with compat_contextlib_suppress(Exception):
|
||||||
|
avail = FFmpegPostProcessor(downloader=None).available
|
||||||
|
return unittest.skipUnless(
|
||||||
|
avail, externalFD.get_basename() + ' not found')
|
||||||
|
|
||||||
|
|
||||||
|
@ifFFmpegFDAvailable(FFmpegFD)
|
||||||
|
class TestFFmpegFD(unittest.TestCase):
|
||||||
|
_args = []
|
||||||
|
|
||||||
|
def _test_cmd(self, args):
|
||||||
|
self._args = args
|
||||||
|
|
||||||
|
def test_make_cmd(self):
|
||||||
|
with FakeYDL() as ydl:
|
||||||
|
downloader = FFmpegFD(ydl, {})
|
||||||
|
downloader._debug_cmd = self._test_cmd
|
||||||
|
info_dict = TEST_INFO.copy()
|
||||||
|
info_dict['ext'] = 'mp4'
|
||||||
|
|
||||||
|
downloader._call_downloader('test', info_dict)
|
||||||
|
self.assertEqual(self._args, [
|
||||||
|
'ffmpeg', '-y', '-i', 'http://www.example.com/',
|
||||||
|
'-c', 'copy', '-f', 'mp4', 'file:test'])
|
||||||
|
|
||||||
|
# Test cookies arg is added
|
||||||
|
ydl.cookiejar.set_cookie(cookiejar_Cookie(**TEST_COOKIE))
|
||||||
|
downloader._call_downloader('test', info_dict)
|
||||||
|
self.assertEqual(self._args, [
|
||||||
|
'ffmpeg', '-y', '-cookies', TEST_COOKIE_VALUE + '; path=/; domain=.example.com;\r\n',
|
||||||
|
'-i', 'http://www.example.com/', '-c', 'copy', '-f', 'mp4', 'file:test'])
|
||||||
|
|
||||||
|
|
||||||
|
if __name__ == '__main__':
|
||||||
|
unittest.main()
|
@ -0,0 +1,509 @@
|
|||||||
|
#!/usr/bin/env python
|
||||||
|
# coding: utf-8
|
||||||
|
from __future__ import unicode_literals
|
||||||
|
|
||||||
|
# Allow direct execution
|
||||||
|
import os
|
||||||
|
import sys
|
||||||
|
import unittest
|
||||||
|
sys.path.insert(0, os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
|
||||||
|
|
||||||
|
|
||||||
|
import re
|
||||||
|
|
||||||
|
from youtube_dl.traversal import (
|
||||||
|
dict_get,
|
||||||
|
get_first,
|
||||||
|
T,
|
||||||
|
traverse_obj,
|
||||||
|
)
|
||||||
|
from youtube_dl.compat import (
|
||||||
|
compat_etree_fromstring,
|
||||||
|
compat_http_cookies,
|
||||||
|
compat_str,
|
||||||
|
)
|
||||||
|
from youtube_dl.utils import (
|
||||||
|
int_or_none,
|
||||||
|
str_or_none,
|
||||||
|
)
|
||||||
|
|
||||||
|
_TEST_DATA = {
|
||||||
|
100: 100,
|
||||||
|
1.2: 1.2,
|
||||||
|
'str': 'str',
|
||||||
|
'None': None,
|
||||||
|
'...': Ellipsis,
|
||||||
|
'urls': [
|
||||||
|
{'index': 0, 'url': 'https://www.example.com/0'},
|
||||||
|
{'index': 1, 'url': 'https://www.example.com/1'},
|
||||||
|
],
|
||||||
|
'data': (
|
||||||
|
{'index': 2},
|
||||||
|
{'index': 3},
|
||||||
|
),
|
||||||
|
'dict': {},
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
if sys.version_info < (3, 0):
|
||||||
|
class _TestCase(unittest.TestCase):
|
||||||
|
|
||||||
|
def assertCountEqual(self, *args, **kwargs):
|
||||||
|
return self.assertItemsEqual(*args, **kwargs)
|
||||||
|
else:
|
||||||
|
_TestCase = unittest.TestCase
|
||||||
|
|
||||||
|
|
||||||
|
class TestTraversal(_TestCase):
|
||||||
|
def assertMaybeCountEqual(self, *args, **kwargs):
|
||||||
|
if sys.version_info < (3, 7):
|
||||||
|
# random dict order
|
||||||
|
return self.assertCountEqual(*args, **kwargs)
|
||||||
|
else:
|
||||||
|
return self.assertEqual(*args, **kwargs)
|
||||||
|
|
||||||
|
def test_traverse_obj(self):
|
||||||
|
# instant compat
|
||||||
|
str = compat_str
|
||||||
|
|
||||||
|
# define a pukka Iterable
|
||||||
|
def iter_range(stop):
|
||||||
|
for from_ in range(stop):
|
||||||
|
yield from_
|
||||||
|
|
||||||
|
# Test base functionality
|
||||||
|
self.assertEqual(traverse_obj(_TEST_DATA, ('str',)), 'str',
|
||||||
|
msg='allow tuple path')
|
||||||
|
self.assertEqual(traverse_obj(_TEST_DATA, ['str']), 'str',
|
||||||
|
msg='allow list path')
|
||||||
|
self.assertEqual(traverse_obj(_TEST_DATA, (value for value in ("str",))), 'str',
|
||||||
|
msg='allow iterable path')
|
||||||
|
self.assertEqual(traverse_obj(_TEST_DATA, 'str'), 'str',
|
||||||
|
msg='single items should be treated as a path')
|
||||||
|
self.assertEqual(traverse_obj(_TEST_DATA, None), _TEST_DATA)
|
||||||
|
self.assertEqual(traverse_obj(_TEST_DATA, 100), 100)
|
||||||
|
self.assertEqual(traverse_obj(_TEST_DATA, 1.2), 1.2)
|
||||||
|
|
||||||
|
# Test Ellipsis behavior
|
||||||
|
self.assertCountEqual(traverse_obj(_TEST_DATA, Ellipsis),
|
||||||
|
(item for item in _TEST_DATA.values() if item not in (None, {})),
|
||||||
|
msg='`...` should give all non-discarded values')
|
||||||
|
self.assertCountEqual(traverse_obj(_TEST_DATA, ('urls', 0, Ellipsis)), _TEST_DATA['urls'][0].values(),
|
||||||
|
msg='`...` selection for dicts should select all values')
|
||||||
|
self.assertEqual(traverse_obj(_TEST_DATA, (Ellipsis, Ellipsis, 'url')),
|
||||||
|
['https://www.example.com/0', 'https://www.example.com/1'],
|
||||||
|
msg='nested `...` queries should work')
|
||||||
|
self.assertCountEqual(traverse_obj(_TEST_DATA, (Ellipsis, Ellipsis, 'index')), iter_range(4),
|
||||||
|
msg='`...` query result should be flattened')
|
||||||
|
self.assertEqual(traverse_obj(iter(range(4)), Ellipsis), list(range(4)),
|
||||||
|
msg='`...` should accept iterables')
|
||||||
|
|
||||||
|
# Test function as key
|
||||||
|
self.assertEqual(traverse_obj(_TEST_DATA, lambda x, y: x == 'urls' and isinstance(y, list)),
|
||||||
|
[_TEST_DATA['urls']],
|
||||||
|
msg='function as query key should perform a filter based on (key, value)')
|
||||||
|
self.assertCountEqual(traverse_obj(_TEST_DATA, lambda _, x: isinstance(x[0], str)), set(('str',)),
|
||||||
|
msg='exceptions in the query function should be caught')
|
||||||
|
self.assertEqual(traverse_obj(iter(range(4)), lambda _, x: x % 2 == 0), [0, 2],
|
||||||
|
msg='function key should accept iterables')
|
||||||
|
if __debug__:
|
||||||
|
with self.assertRaises(Exception, msg='Wrong function signature should raise in debug'):
|
||||||
|
traverse_obj(_TEST_DATA, lambda a: Ellipsis)
|
||||||
|
with self.assertRaises(Exception, msg='Wrong function signature should raise in debug'):
|
||||||
|
traverse_obj(_TEST_DATA, lambda a, b, c: Ellipsis)
|
||||||
|
|
||||||
|
# Test set as key (transformation/type, like `expected_type`)
|
||||||
|
self.assertEqual(traverse_obj(_TEST_DATA, (Ellipsis, T(str.upper), )), ['STR'],
|
||||||
|
msg='Function in set should be a transformation')
|
||||||
|
self.assertEqual(traverse_obj(_TEST_DATA, ('fail', T(lambda _: 'const'))), 'const',
|
||||||
|
msg='Function in set should always be called')
|
||||||
|
self.assertEqual(traverse_obj(_TEST_DATA, (Ellipsis, T(str))), ['str'],
|
||||||
|
msg='Type in set should be a type filter')
|
||||||
|
self.assertMaybeCountEqual(traverse_obj(_TEST_DATA, (Ellipsis, T(str, int))), [100, 'str'],
|
||||||
|
msg='Multiple types in set should be a type filter')
|
||||||
|
self.assertEqual(traverse_obj(_TEST_DATA, T(dict)), _TEST_DATA,
|
||||||
|
msg='A single set should be wrapped into a path')
|
||||||
|
self.assertEqual(traverse_obj(_TEST_DATA, (Ellipsis, T(str.upper))), ['STR'],
|
||||||
|
msg='Transformation function should not raise')
|
||||||
|
self.assertMaybeCountEqual(traverse_obj(_TEST_DATA, (Ellipsis, T(str_or_none))),
|
||||||
|
[item for item in map(str_or_none, _TEST_DATA.values()) if item is not None],
|
||||||
|
msg='Function in set should be a transformation')
|
||||||
|
if __debug__:
|
||||||
|
with self.assertRaises(Exception, msg='Sets with length != 1 should raise in debug'):
|
||||||
|
traverse_obj(_TEST_DATA, set())
|
||||||
|
with self.assertRaises(Exception, msg='Sets with length != 1 should raise in debug'):
|
||||||
|
traverse_obj(_TEST_DATA, set((str.upper, str)))
|
||||||
|
|
||||||
|
# Test `slice` as a key
|
||||||
|
_SLICE_DATA = [0, 1, 2, 3, 4]
|
||||||
|
self.assertEqual(traverse_obj(_TEST_DATA, ('dict', slice(1))), None,
|
||||||
|
msg='slice on a dictionary should not throw')
|
||||||
|
self.assertEqual(traverse_obj(_SLICE_DATA, slice(1)), _SLICE_DATA[:1],
|
||||||
|
msg='slice key should apply slice to sequence')
|
||||||
|
self.assertEqual(traverse_obj(_SLICE_DATA, slice(1, 2)), _SLICE_DATA[1:2],
|
||||||
|
msg='slice key should apply slice to sequence')
|
||||||
|
self.assertEqual(traverse_obj(_SLICE_DATA, slice(1, 4, 2)), _SLICE_DATA[1:4:2],
|
||||||
|
msg='slice key should apply slice to sequence')
|
||||||
|
|
||||||
|
# Test alternative paths
|
||||||
|
self.assertEqual(traverse_obj(_TEST_DATA, 'fail', 'str'), 'str',
|
||||||
|
msg='multiple `paths` should be treated as alternative paths')
|
||||||
|
self.assertEqual(traverse_obj(_TEST_DATA, 'str', 100), 'str',
|
||||||
|
msg='alternatives should exit early')
|
||||||
|
self.assertEqual(traverse_obj(_TEST_DATA, 'fail', 'fail'), None,
|
||||||
|
msg='alternatives should return `default` if exhausted')
|
||||||
|
self.assertEqual(traverse_obj(_TEST_DATA, (Ellipsis, 'fail'), 100), 100,
|
||||||
|
msg='alternatives should track their own branching return')
|
||||||
|
self.assertEqual(traverse_obj(_TEST_DATA, ('dict', Ellipsis), ('data', Ellipsis)), list(_TEST_DATA['data']),
|
||||||
|
msg='alternatives on empty objects should search further')
|
||||||
|
|
||||||
|
# Test branch and path nesting
|
||||||
|
self.assertEqual(traverse_obj(_TEST_DATA, ('urls', (3, 0), 'url')), ['https://www.example.com/0'],
|
||||||
|
msg='tuple as key should be treated as branches')
|
||||||
|
self.assertEqual(traverse_obj(_TEST_DATA, ('urls', [3, 0], 'url')), ['https://www.example.com/0'],
|
||||||
|
msg='list as key should be treated as branches')
|
||||||
|
self.assertEqual(traverse_obj(_TEST_DATA, ('urls', ((1, 'fail'), (0, 'url')))), ['https://www.example.com/0'],
|
||||||
|
msg='double nesting in path should be treated as paths')
|
||||||
|
self.assertEqual(traverse_obj(['0', [1, 2]], [(0, 1), 0]), [1],
|
||||||
|
msg='do not fail early on branching')
|
||||||
|
self.assertCountEqual(traverse_obj(_TEST_DATA, ('urls', ((1, ('fail', 'url')), (0, 'url')))),
|
||||||
|
['https://www.example.com/0', 'https://www.example.com/1'],
|
||||||
|
msg='triple nesting in path should be treated as branches')
|
||||||
|
self.assertEqual(traverse_obj(_TEST_DATA, ('urls', ('fail', (Ellipsis, 'url')))),
|
||||||
|
['https://www.example.com/0', 'https://www.example.com/1'],
|
||||||
|
msg='ellipsis as branch path start gets flattened')
|
||||||
|
|
||||||
|
# Test dictionary as key
|
||||||
|
self.assertEqual(traverse_obj(_TEST_DATA, {0: 100, 1: 1.2}), {0: 100, 1: 1.2},
|
||||||
|
msg='dict key should result in a dict with the same keys')
|
||||||
|
self.assertEqual(traverse_obj(_TEST_DATA, {0: ('urls', 0, 'url')}),
|
||||||
|
{0: 'https://www.example.com/0'},
|
||||||
|
msg='dict key should allow paths')
|
||||||
|
self.assertEqual(traverse_obj(_TEST_DATA, {0: ('urls', (3, 0), 'url')}),
|
||||||
|
{0: ['https://www.example.com/0']},
|
||||||
|
msg='tuple in dict path should be treated as branches')
|
||||||
|
self.assertEqual(traverse_obj(_TEST_DATA, {0: ('urls', ((1, 'fail'), (0, 'url')))}),
|
||||||
|
{0: ['https://www.example.com/0']},
|
||||||
|
msg='double nesting in dict path should be treated as paths')
|
||||||
|
self.assertEqual(traverse_obj(_TEST_DATA, {0: ('urls', ((1, ('fail', 'url')), (0, 'url')))}),
|
||||||
|
{0: ['https://www.example.com/1', 'https://www.example.com/0']},
|
||||||
|
msg='triple nesting in dict path should be treated as branches')
|
||||||
|
self.assertEqual(traverse_obj(_TEST_DATA, {0: 'fail'}), {},
|
||||||
|
msg='remove `None` values when top level dict key fails')
|
||||||
|
self.assertEqual(traverse_obj(_TEST_DATA, {0: 'fail'}, default=Ellipsis), {0: Ellipsis},
|
||||||
|
msg='use `default` if key fails and `default`')
|
||||||
|
self.assertEqual(traverse_obj(_TEST_DATA, {0: 'dict'}), {},
|
||||||
|
msg='remove empty values when dict key')
|
||||||
|
self.assertEqual(traverse_obj(_TEST_DATA, {0: 'dict'}, default=Ellipsis), {0: Ellipsis},
|
||||||
|
msg='use `default` when dict key and a default')
|
||||||
|
self.assertEqual(traverse_obj(_TEST_DATA, {0: {0: 'fail'}}), {},
|
||||||
|
msg='remove empty values when nested dict key fails')
|
||||||
|
self.assertEqual(traverse_obj(None, {0: 'fail'}), {},
|
||||||
|
msg='default to dict if pruned')
|
||||||
|
self.assertEqual(traverse_obj(None, {0: 'fail'}, default=Ellipsis), {0: Ellipsis},
|
||||||
|
msg='default to dict if pruned and default is given')
|
||||||
|
self.assertEqual(traverse_obj(_TEST_DATA, {0: {0: 'fail'}}, default=Ellipsis), {0: {0: Ellipsis}},
|
||||||
|
msg='use nested `default` when nested dict key fails and `default`')
|
||||||
|
self.assertEqual(traverse_obj(_TEST_DATA, {0: ('dict', Ellipsis)}), {},
|
||||||
|
msg='remove key if branch in dict key not successful')
|
||||||
|
|
||||||
|
# Testing default parameter behavior
|
||||||
|
_DEFAULT_DATA = {'None': None, 'int': 0, 'list': []}
|
||||||
|
self.assertEqual(traverse_obj(_DEFAULT_DATA, 'fail'), None,
|
||||||
|
msg='default value should be `None`')
|
||||||
|
self.assertEqual(traverse_obj(_DEFAULT_DATA, 'fail', 'fail', default=Ellipsis), Ellipsis,
|
||||||
|
msg='chained fails should result in default')
|
||||||
|
self.assertEqual(traverse_obj(_DEFAULT_DATA, 'None', 'int'), 0,
|
||||||
|
msg='should not short cirquit on `None`')
|
||||||
|
self.assertEqual(traverse_obj(_DEFAULT_DATA, 'fail', default=1), 1,
|
||||||
|
msg='invalid dict key should result in `default`')
|
||||||
|
self.assertEqual(traverse_obj(_DEFAULT_DATA, 'None', default=1), 1,
|
||||||
|
msg='`None` is a deliberate sentinel and should become `default`')
|
||||||
|
self.assertEqual(traverse_obj(_DEFAULT_DATA, ('list', 10)), None,
|
||||||
|
msg='`IndexError` should result in `default`')
|
||||||
|
self.assertEqual(traverse_obj(_DEFAULT_DATA, (Ellipsis, 'fail'), default=1), 1,
|
||||||
|
msg='if branched but not successful return `default` if defined, not `[]`')
|
||||||
|
self.assertEqual(traverse_obj(_DEFAULT_DATA, (Ellipsis, 'fail'), default=None), None,
|
||||||
|
msg='if branched but not successful return `default` even if `default` is `None`')
|
||||||
|
self.assertEqual(traverse_obj(_DEFAULT_DATA, (Ellipsis, 'fail')), [],
|
||||||
|
msg='if branched but not successful return `[]`, not `default`')
|
||||||
|
self.assertEqual(traverse_obj(_DEFAULT_DATA, ('list', Ellipsis)), [],
|
||||||
|
msg='if branched but object is empty return `[]`, not `default`')
|
||||||
|
self.assertEqual(traverse_obj(None, Ellipsis), [],
|
||||||
|
msg='if branched but object is `None` return `[]`, not `default`')
|
||||||
|
self.assertEqual(traverse_obj({0: None}, (0, Ellipsis)), [],
|
||||||
|
msg='if branched but state is `None` return `[]`, not `default`')
|
||||||
|
|
||||||
|
branching_paths = [
|
||||||
|
('fail', Ellipsis),
|
||||||
|
(Ellipsis, 'fail'),
|
||||||
|
100 * ('fail',) + (Ellipsis,),
|
||||||
|
(Ellipsis,) + 100 * ('fail',),
|
||||||
|
]
|
||||||
|
for branching_path in branching_paths:
|
||||||
|
self.assertEqual(traverse_obj({}, branching_path), [],
|
||||||
|
msg='if branched but state is `None`, return `[]` (not `default`)')
|
||||||
|
self.assertEqual(traverse_obj({}, 'fail', branching_path), [],
|
||||||
|
msg='if branching in last alternative and previous did not match, return `[]` (not `default`)')
|
||||||
|
self.assertEqual(traverse_obj({0: 'x'}, 0, branching_path), 'x',
|
||||||
|
msg='if branching in last alternative and previous did match, return single value')
|
||||||
|
self.assertEqual(traverse_obj({0: 'x'}, branching_path, 0), 'x',
|
||||||
|
msg='if branching in first alternative and non-branching path does match, return single value')
|
||||||
|
self.assertEqual(traverse_obj({}, branching_path, 'fail'), None,
|
||||||
|
msg='if branching in first alternative and non-branching path does not match, return `default`')
|
||||||
|
|
||||||
|
# Testing expected_type behavior
|
||||||
|
_EXPECTED_TYPE_DATA = {'str': 'str', 'int': 0}
|
||||||
|
self.assertEqual(traverse_obj(_EXPECTED_TYPE_DATA, 'str', expected_type=str),
|
||||||
|
'str', msg='accept matching `expected_type` type')
|
||||||
|
self.assertEqual(traverse_obj(_EXPECTED_TYPE_DATA, 'str', expected_type=int),
|
||||||
|
None, msg='reject non-matching `expected_type` type')
|
||||||
|
self.assertEqual(traverse_obj(_EXPECTED_TYPE_DATA, 'int', expected_type=lambda x: str(x)),
|
||||||
|
'0', msg='transform type using type function')
|
||||||
|
self.assertEqual(traverse_obj(_EXPECTED_TYPE_DATA, 'str', expected_type=lambda _: 1 / 0),
|
||||||
|
None, msg='wrap expected_type function in try_call')
|
||||||
|
self.assertEqual(traverse_obj(_EXPECTED_TYPE_DATA, Ellipsis, expected_type=str),
|
||||||
|
['str'], msg='eliminate items that expected_type fails on')
|
||||||
|
self.assertEqual(traverse_obj(_TEST_DATA, {0: 100, 1: 1.2}, expected_type=int),
|
||||||
|
{0: 100}, msg='type as expected_type should filter dict values')
|
||||||
|
self.assertEqual(traverse_obj(_TEST_DATA, {0: 100, 1: 1.2, 2: 'None'}, expected_type=str_or_none),
|
||||||
|
{0: '100', 1: '1.2'}, msg='function as expected_type should transform dict values')
|
||||||
|
self.assertEqual(traverse_obj(_TEST_DATA, ({0: 1.2}, 0, set((int_or_none,))), expected_type=int),
|
||||||
|
1, msg='expected_type should not filter non-final dict values')
|
||||||
|
self.assertEqual(traverse_obj(_TEST_DATA, {0: {0: 100, 1: 'str'}}, expected_type=int),
|
||||||
|
{0: {0: 100}}, msg='expected_type should transform deep dict values')
|
||||||
|
self.assertEqual(traverse_obj(_TEST_DATA, [({0: '...'}, {0: '...'})], expected_type=type(Ellipsis)),
|
||||||
|
[{0: Ellipsis}, {0: Ellipsis}], msg='expected_type should transform branched dict values')
|
||||||
|
self.assertEqual(traverse_obj({1: {3: 4}}, [(1, 2), 3], expected_type=int),
|
||||||
|
[4], msg='expected_type regression for type matching in tuple branching')
|
||||||
|
self.assertEqual(traverse_obj(_TEST_DATA, ['data', Ellipsis], expected_type=int),
|
||||||
|
[], msg='expected_type regression for type matching in dict result')
|
||||||
|
|
||||||
|
# Test get_all behavior
|
||||||
|
_GET_ALL_DATA = {'key': [0, 1, 2]}
|
||||||
|
self.assertEqual(traverse_obj(_GET_ALL_DATA, ('key', Ellipsis), get_all=False), 0,
|
||||||
|
msg='if not `get_all`, return only first matching value')
|
||||||
|
self.assertEqual(traverse_obj(_GET_ALL_DATA, Ellipsis, get_all=False), [0, 1, 2],
|
||||||
|
msg='do not overflatten if not `get_all`')
|
||||||
|
|
||||||
|
# Test casesense behavior
|
||||||
|
_CASESENSE_DATA = {
|
||||||
|
'KeY': 'value0',
|
||||||
|
0: {
|
||||||
|
'KeY': 'value1',
|
||||||
|
0: {'KeY': 'value2'},
|
||||||
|
},
|
||||||
|
# FULLWIDTH LATIN CAPITAL LETTER K
|
||||||
|
'\uff2bey': 'value3',
|
||||||
|
}
|
||||||
|
self.assertEqual(traverse_obj(_CASESENSE_DATA, 'key'), None,
|
||||||
|
msg='dict keys should be case sensitive unless `casesense`')
|
||||||
|
self.assertEqual(traverse_obj(_CASESENSE_DATA, 'keY',
|
||||||
|
casesense=False), 'value0',
|
||||||
|
msg='allow non matching key case if `casesense`')
|
||||||
|
self.assertEqual(traverse_obj(_CASESENSE_DATA, '\uff4bey', # FULLWIDTH LATIN SMALL LETTER K
|
||||||
|
casesense=False), 'value3',
|
||||||
|
msg='allow non matching Unicode key case if `casesense`')
|
||||||
|
self.assertEqual(traverse_obj(_CASESENSE_DATA, (0, ('keY',)),
|
||||||
|
casesense=False), ['value1'],
|
||||||
|
msg='allow non matching key case in branch if `casesense`')
|
||||||
|
self.assertEqual(traverse_obj(_CASESENSE_DATA, (0, ((0, 'keY'),)),
|
||||||
|
casesense=False), ['value2'],
|
||||||
|
msg='allow non matching key case in branch path if `casesense`')
|
||||||
|
|
||||||
|
# Test traverse_string behavior
|
||||||
|
_TRAVERSE_STRING_DATA = {'str': 'str', 1.2: 1.2}
|
||||||
|
self.assertEqual(traverse_obj(_TRAVERSE_STRING_DATA, ('str', 0)), None,
|
||||||
|
msg='do not traverse into string if not `traverse_string`')
|
||||||
|
self.assertEqual(traverse_obj(_TRAVERSE_STRING_DATA, ('str', 0),
|
||||||
|
_traverse_string=True), 's',
|
||||||
|
msg='traverse into string if `traverse_string`')
|
||||||
|
self.assertEqual(traverse_obj(_TRAVERSE_STRING_DATA, (1.2, 1),
|
||||||
|
_traverse_string=True), '.',
|
||||||
|
msg='traverse into converted data if `traverse_string`')
|
||||||
|
self.assertEqual(traverse_obj(_TRAVERSE_STRING_DATA, ('str', Ellipsis),
|
||||||
|
_traverse_string=True), 'str',
|
||||||
|
msg='`...` should result in string (same value) if `traverse_string`')
|
||||||
|
self.assertEqual(traverse_obj(_TRAVERSE_STRING_DATA, ('str', slice(0, None, 2)),
|
||||||
|
_traverse_string=True), 'sr',
|
||||||
|
msg='`slice` should result in string if `traverse_string`')
|
||||||
|
self.assertEqual(traverse_obj(_TRAVERSE_STRING_DATA, ('str', lambda i, v: i or v == 's'),
|
||||||
|
_traverse_string=True), 'str',
|
||||||
|
msg='function should result in string if `traverse_string`')
|
||||||
|
self.assertEqual(traverse_obj(_TRAVERSE_STRING_DATA, ('str', (0, 2)),
|
||||||
|
_traverse_string=True), ['s', 'r'],
|
||||||
|
msg='branching should result in list if `traverse_string`')
|
||||||
|
self.assertEqual(traverse_obj({}, (0, Ellipsis), _traverse_string=True), [],
|
||||||
|
msg='branching should result in list if `traverse_string`')
|
||||||
|
self.assertEqual(traverse_obj({}, (0, lambda x, y: True), _traverse_string=True), [],
|
||||||
|
msg='branching should result in list if `traverse_string`')
|
||||||
|
self.assertEqual(traverse_obj({}, (0, slice(1)), _traverse_string=True), [],
|
||||||
|
msg='branching should result in list if `traverse_string`')
|
||||||
|
|
||||||
|
# Test re.Match as input obj
|
||||||
|
mobj = re.match(r'^0(12)(?P<group>3)(4)?$', '0123')
|
||||||
|
self.assertEqual(traverse_obj(mobj, Ellipsis), [x for x in mobj.groups() if x is not None],
|
||||||
|
msg='`...` on a `re.Match` should give its `groups()`')
|
||||||
|
self.assertEqual(traverse_obj(mobj, lambda k, _: k in (0, 2)), ['0123', '3'],
|
||||||
|
msg='function on a `re.Match` should give groupno, value starting at 0')
|
||||||
|
self.assertEqual(traverse_obj(mobj, 'group'), '3',
|
||||||
|
msg='str key on a `re.Match` should give group with that name')
|
||||||
|
self.assertEqual(traverse_obj(mobj, 2), '3',
|
||||||
|
msg='int key on a `re.Match` should give group with that name')
|
||||||
|
self.assertEqual(traverse_obj(mobj, 'gRoUp', casesense=False), '3',
|
||||||
|
msg='str key on a `re.Match` should respect casesense')
|
||||||
|
self.assertEqual(traverse_obj(mobj, 'fail'), None,
|
||||||
|
msg='failing str key on a `re.Match` should return `default`')
|
||||||
|
self.assertEqual(traverse_obj(mobj, 'gRoUpS', casesense=False), None,
|
||||||
|
msg='failing str key on a `re.Match` should return `default`')
|
||||||
|
self.assertEqual(traverse_obj(mobj, 8), None,
|
||||||
|
msg='failing int key on a `re.Match` should return `default`')
|
||||||
|
self.assertEqual(traverse_obj(mobj, lambda k, _: k in (0, 'group')), ['0123', '3'],
|
||||||
|
msg='function on a `re.Match` should give group name as well')
|
||||||
|
|
||||||
|
# Test xml.etree.ElementTree.Element as input obj
|
||||||
|
etree = compat_etree_fromstring('''<?xml version="1.0"?>
|
||||||
|
<data>
|
||||||
|
<country name="Liechtenstein">
|
||||||
|
<rank>1</rank>
|
||||||
|
<year>2008</year>
|
||||||
|
<gdppc>141100</gdppc>
|
||||||
|
<neighbor name="Austria" direction="E"/>
|
||||||
|
<neighbor name="Switzerland" direction="W"/>
|
||||||
|
</country>
|
||||||
|
<country name="Singapore">
|
||||||
|
<rank>4</rank>
|
||||||
|
<year>2011</year>
|
||||||
|
<gdppc>59900</gdppc>
|
||||||
|
<neighbor name="Malaysia" direction="N"/>
|
||||||
|
</country>
|
||||||
|
<country name="Panama">
|
||||||
|
<rank>68</rank>
|
||||||
|
<year>2011</year>
|
||||||
|
<gdppc>13600</gdppc>
|
||||||
|
<neighbor name="Costa Rica" direction="W"/>
|
||||||
|
<neighbor name="Colombia" direction="E"/>
|
||||||
|
</country>
|
||||||
|
</data>''')
|
||||||
|
self.assertEqual(traverse_obj(etree, ''), etree,
|
||||||
|
msg='empty str key should return the element itself')
|
||||||
|
self.assertEqual(traverse_obj(etree, 'country'), list(etree),
|
||||||
|
msg='str key should return all children with that tag name')
|
||||||
|
self.assertEqual(traverse_obj(etree, Ellipsis), list(etree),
|
||||||
|
msg='`...` as key should return all children')
|
||||||
|
self.assertEqual(traverse_obj(etree, lambda _, x: x[0].text == '4'), [etree[1]],
|
||||||
|
msg='function as key should get element as value')
|
||||||
|
self.assertEqual(traverse_obj(etree, lambda i, _: i == 1), [etree[1]],
|
||||||
|
msg='function as key should get index as key')
|
||||||
|
self.assertEqual(traverse_obj(etree, 0), etree[0],
|
||||||
|
msg='int key should return the nth child')
|
||||||
|
self.assertEqual(traverse_obj(etree, './/neighbor/@name'),
|
||||||
|
['Austria', 'Switzerland', 'Malaysia', 'Costa Rica', 'Colombia'],
|
||||||
|
msg='`@<attribute>` at end of path should give that attribute')
|
||||||
|
self.assertEqual(traverse_obj(etree, '//neighbor/@fail'), [None, None, None, None, None],
|
||||||
|
msg='`@<nonexistent>` at end of path should give `None`')
|
||||||
|
self.assertEqual(traverse_obj(etree, ('//neighbor/@', 2)), {'name': 'Malaysia', 'direction': 'N'},
|
||||||
|
msg='`@` should give the full attribute dict')
|
||||||
|
self.assertEqual(traverse_obj(etree, '//year/text()'), ['2008', '2011', '2011'],
|
||||||
|
msg='`text()` at end of path should give the inner text')
|
||||||
|
self.assertEqual(traverse_obj(etree, '//*[@direction]/@direction'), ['E', 'W', 'N', 'W', 'E'],
|
||||||
|
msg='full python xpath features should be supported')
|
||||||
|
self.assertEqual(traverse_obj(etree, (0, '@name')), 'Liechtenstein',
|
||||||
|
msg='special transformations should act on current element')
|
||||||
|
self.assertEqual(traverse_obj(etree, ('country', 0, Ellipsis, 'text()', T(int_or_none))), [1, 2008, 141100],
|
||||||
|
msg='special transformations should act on current element')
|
||||||
|
|
||||||
|
def test_traversal_unbranching(self):
|
||||||
|
self.assertEqual(traverse_obj(_TEST_DATA, [(100, 1.2), all]), [100, 1.2],
|
||||||
|
msg='`all` should give all results as list')
|
||||||
|
self.assertEqual(traverse_obj(_TEST_DATA, [(100, 1.2), any]), 100,
|
||||||
|
msg='`any` should give the first result')
|
||||||
|
self.assertEqual(traverse_obj(_TEST_DATA, [100, all]), [100],
|
||||||
|
msg='`all` should give list if non branching')
|
||||||
|
self.assertEqual(traverse_obj(_TEST_DATA, [100, any]), 100,
|
||||||
|
msg='`any` should give single item if non branching')
|
||||||
|
self.assertEqual(traverse_obj(_TEST_DATA, [('dict', 'None', 100), all]), [100],
|
||||||
|
msg='`all` should filter `None` and empty dict')
|
||||||
|
self.assertEqual(traverse_obj(_TEST_DATA, [('dict', 'None', 100), any]), 100,
|
||||||
|
msg='`any` should filter `None` and empty dict')
|
||||||
|
self.assertEqual(traverse_obj(_TEST_DATA, [{
|
||||||
|
'all': [('dict', 'None', 100, 1.2), all],
|
||||||
|
'any': [('dict', 'None', 100, 1.2), any],
|
||||||
|
}]), {'all': [100, 1.2], 'any': 100},
|
||||||
|
msg='`all`/`any` should apply to each dict path separately')
|
||||||
|
self.assertEqual(traverse_obj(_TEST_DATA, [{
|
||||||
|
'all': [('dict', 'None', 100, 1.2), all],
|
||||||
|
'any': [('dict', 'None', 100, 1.2), any],
|
||||||
|
}], get_all=False), {'all': [100, 1.2], 'any': 100},
|
||||||
|
msg='`all`/`any` should apply to dict regardless of `get_all`')
|
||||||
|
self.assertIs(traverse_obj(_TEST_DATA, [('dict', 'None', 100, 1.2), all, T(float)]), None,
|
||||||
|
msg='`all` should reset branching status')
|
||||||
|
self.assertIs(traverse_obj(_TEST_DATA, [('dict', 'None', 100, 1.2), any, T(float)]), None,
|
||||||
|
msg='`any` should reset branching status')
|
||||||
|
self.assertEqual(traverse_obj(_TEST_DATA, [('dict', 'None', 100, 1.2), all, Ellipsis, T(float)]), [1.2],
|
||||||
|
msg='`all` should allow further branching')
|
||||||
|
self.assertEqual(traverse_obj(_TEST_DATA, [('dict', 'None', 'urls', 'data'), any, Ellipsis, 'index']), [0, 1],
|
||||||
|
msg='`any` should allow further branching')
|
||||||
|
|
||||||
|
def test_traversal_morsel(self):
|
||||||
|
values = {
|
||||||
|
'expires': 'a',
|
||||||
|
'path': 'b',
|
||||||
|
'comment': 'c',
|
||||||
|
'domain': 'd',
|
||||||
|
'max-age': 'e',
|
||||||
|
'secure': 'f',
|
||||||
|
'httponly': 'g',
|
||||||
|
'version': 'h',
|
||||||
|
'samesite': 'i',
|
||||||
|
}
|
||||||
|
# SameSite added in Py3.8, breaks .update for 3.5-3.7
|
||||||
|
if sys.version_info < (3, 8):
|
||||||
|
del values['samesite']
|
||||||
|
morsel = compat_http_cookies.Morsel()
|
||||||
|
morsel.set(str('item_key'), 'item_value', 'coded_value')
|
||||||
|
morsel.update(values)
|
||||||
|
values['key'] = str('item_key')
|
||||||
|
values['value'] = 'item_value'
|
||||||
|
values = dict((str(k), v) for k, v in values.items())
|
||||||
|
# make test pass even without ordered dict
|
||||||
|
value_set = set(values.values())
|
||||||
|
|
||||||
|
for key, value in values.items():
|
||||||
|
self.assertEqual(traverse_obj(morsel, key), value,
|
||||||
|
msg='Morsel should provide access to all values')
|
||||||
|
self.assertEqual(set(traverse_obj(morsel, Ellipsis)), value_set,
|
||||||
|
msg='`...` should yield all values')
|
||||||
|
self.assertEqual(set(traverse_obj(morsel, lambda k, v: True)), value_set,
|
||||||
|
msg='function key should yield all values')
|
||||||
|
self.assertIs(traverse_obj(morsel, [(None,), any]), morsel,
|
||||||
|
msg='Morsel should not be implicitly changed to dict on usage')
|
||||||
|
|
||||||
|
def test_get_first(self):
|
||||||
|
self.assertEqual(get_first([{'a': None}, {'a': 'spam'}], 'a'), 'spam')
|
||||||
|
|
||||||
|
def test_dict_get(self):
|
||||||
|
FALSE_VALUES = {
|
||||||
|
'none': None,
|
||||||
|
'false': False,
|
||||||
|
'zero': 0,
|
||||||
|
'empty_string': '',
|
||||||
|
'empty_list': [],
|
||||||
|
}
|
||||||
|
d = FALSE_VALUES.copy()
|
||||||
|
d['a'] = 42
|
||||||
|
self.assertEqual(dict_get(d, 'a'), 42)
|
||||||
|
self.assertEqual(dict_get(d, 'b'), None)
|
||||||
|
self.assertEqual(dict_get(d, 'b', 42), 42)
|
||||||
|
self.assertEqual(dict_get(d, ('a', )), 42)
|
||||||
|
self.assertEqual(dict_get(d, ('b', 'a', )), 42)
|
||||||
|
self.assertEqual(dict_get(d, ('b', 'c', 'a', 'd', )), 42)
|
||||||
|
self.assertEqual(dict_get(d, ('b', 'c', )), None)
|
||||||
|
self.assertEqual(dict_get(d, ('b', 'c', ), 42), 42)
|
||||||
|
for key, false_value in FALSE_VALUES.items():
|
||||||
|
self.assertEqual(dict_get(d, ('b', 'c', key, )), None)
|
||||||
|
self.assertEqual(dict_get(d, ('b', 'c', key, ), skip_false_values=False), false_value)
|
||||||
|
|
||||||
|
|
||||||
|
if __name__ == '__main__':
|
||||||
|
unittest.main()
|
@ -0,0 +1,26 @@
|
|||||||
|
#!/usr/bin/env python
|
||||||
|
from __future__ import unicode_literals
|
||||||
|
|
||||||
|
# Allow direct execution
|
||||||
|
import os
|
||||||
|
import sys
|
||||||
|
import unittest
|
||||||
|
sys.path.insert(0, os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
|
||||||
|
|
||||||
|
|
||||||
|
from youtube_dl.extractor import YoutubeIE
|
||||||
|
|
||||||
|
|
||||||
|
class TestYoutubeMisc(unittest.TestCase):
|
||||||
|
def test_youtube_extract(self):
|
||||||
|
assertExtractId = lambda url, id: self.assertEqual(YoutubeIE.extract_id(url), id)
|
||||||
|
assertExtractId('http://www.youtube.com/watch?&v=BaW_jenozKc', 'BaW_jenozKc')
|
||||||
|
assertExtractId('https://www.youtube.com/watch?&v=BaW_jenozKc', 'BaW_jenozKc')
|
||||||
|
assertExtractId('https://www.youtube.com/watch?feature=player_embedded&v=BaW_jenozKc', 'BaW_jenozKc')
|
||||||
|
assertExtractId('https://www.youtube.com/watch_popup?v=BaW_jenozKc', 'BaW_jenozKc')
|
||||||
|
assertExtractId('http://www.youtube.com/watch?v=BaW_jenozKcsharePLED17F32AD9753930', 'BaW_jenozKc')
|
||||||
|
assertExtractId('BaW_jenozKc', 'BaW_jenozKc')
|
||||||
|
|
||||||
|
|
||||||
|
if __name__ == '__main__':
|
||||||
|
unittest.main()
|
@ -0,0 +1,35 @@
|
|||||||
|
<?xml version="1.0"?>
|
||||||
|
<!-- MPD file Generated with GPAC version 1.0.1-revrelease at 2021-11-27T20:53:11.690Z -->
|
||||||
|
<MPD xmlns="urn:mpeg:dash:schema:mpd:2011" minBufferTime="PT1.500S" type="static" mediaPresentationDuration="PT0H0M30.196S" maxSegmentDuration="PT0H0M10.027S" profiles="urn:mpeg:dash:profile:full:2011">
|
||||||
|
<ProgramInformation moreInformationURL="http://gpac.io">
|
||||||
|
<Title>manifest.mpd generated by GPAC</Title>
|
||||||
|
</ProgramInformation>
|
||||||
|
|
||||||
|
<Period duration="PT0H0M30.196S">
|
||||||
|
<AdaptationSet segmentAlignment="true" maxWidth="768" maxHeight="432" maxFrameRate="30000/1001" par="16:9" lang="und" startWithSAP="1">
|
||||||
|
<Representation id="1" mimeType="video/mp4" codecs="avc1.4D401E" width="768" height="432" frameRate="30000/1001" sar="1:1" bandwidth="526987">
|
||||||
|
<BaseURL>video_dashinit.mp4</BaseURL>
|
||||||
|
<SegmentList timescale="90000" duration="900000">
|
||||||
|
<Initialization range="0-881"/>
|
||||||
|
<SegmentURL mediaRange="882-876094" indexRange="882-925"/>
|
||||||
|
<SegmentURL mediaRange="876095-1466732" indexRange="876095-876138"/>
|
||||||
|
<SegmentURL mediaRange="1466733-1953615" indexRange="1466733-1466776"/>
|
||||||
|
<SegmentURL mediaRange="1953616-1994211" indexRange="1953616-1953659"/>
|
||||||
|
</SegmentList>
|
||||||
|
</Representation>
|
||||||
|
</AdaptationSet>
|
||||||
|
<AdaptationSet segmentAlignment="true" lang="und" startWithSAP="1">
|
||||||
|
<Representation id="2" mimeType="audio/mp4" codecs="mp4a.40.2" audioSamplingRate="48000" bandwidth="98096">
|
||||||
|
<AudioChannelConfiguration schemeIdUri="urn:mpeg:dash:23003:3:audio_channel_configuration:2011" value="2"/>
|
||||||
|
<BaseURL>audio_dashinit.mp4</BaseURL>
|
||||||
|
<SegmentList timescale="48000" duration="480000">
|
||||||
|
<Initialization range="0-752"/>
|
||||||
|
<SegmentURL mediaRange="753-124129" indexRange="753-796"/>
|
||||||
|
<SegmentURL mediaRange="124130-250544" indexRange="124130-124173"/>
|
||||||
|
<SegmentURL mediaRange="250545-374929" indexRange="250545-250588"/>
|
||||||
|
</SegmentList>
|
||||||
|
</Representation>
|
||||||
|
</AdaptationSet>
|
||||||
|
</Period>
|
||||||
|
</MPD>
|
||||||
|
|
@ -0,0 +1,351 @@
|
|||||||
|
<?xml version="1.0" encoding="utf-8"?>
|
||||||
|
<!-- Created with Unified Streaming Platform (version=1.10.18-20255) -->
|
||||||
|
<MPD
|
||||||
|
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
|
||||||
|
xmlns="urn:mpeg:dash:schema:mpd:2011"
|
||||||
|
xsi:schemaLocation="urn:mpeg:dash:schema:mpd:2011 http://standards.iso.org/ittf/PubliclyAvailableStandards/MPEG-DASH_schema_files/DASH-MPD.xsd"
|
||||||
|
type="static"
|
||||||
|
mediaPresentationDuration="PT14M48S"
|
||||||
|
maxSegmentDuration="PT1M"
|
||||||
|
minBufferTime="PT10S"
|
||||||
|
profiles="urn:mpeg:dash:profile:isoff-live:2011">
|
||||||
|
<Period
|
||||||
|
id="1"
|
||||||
|
duration="PT14M48S">
|
||||||
|
<BaseURL>dash/</BaseURL>
|
||||||
|
<AdaptationSet
|
||||||
|
id="1"
|
||||||
|
group="1"
|
||||||
|
contentType="audio"
|
||||||
|
segmentAlignment="true"
|
||||||
|
audioSamplingRate="48000"
|
||||||
|
mimeType="audio/mp4"
|
||||||
|
codecs="mp4a.40.2"
|
||||||
|
startWithSAP="1">
|
||||||
|
<AudioChannelConfiguration
|
||||||
|
schemeIdUri="urn:mpeg:dash:23003:3:audio_channel_configuration:2011"
|
||||||
|
value="2" />
|
||||||
|
<Role schemeIdUri="urn:mpeg:dash:role:2011" value="main" />
|
||||||
|
<SegmentTemplate
|
||||||
|
timescale="48000"
|
||||||
|
initialization="3144-kZT4LWMQw6Rh7Kpd-$RepresentationID$.dash"
|
||||||
|
media="3144-kZT4LWMQw6Rh7Kpd-$RepresentationID$-$Time$.dash">
|
||||||
|
<SegmentTimeline>
|
||||||
|
<S t="0" d="96256" r="2" />
|
||||||
|
<S d="95232" />
|
||||||
|
<S d="96256" r="2" />
|
||||||
|
<S d="95232" />
|
||||||
|
<S d="96256" r="2" />
|
||||||
|
<S d="95232" />
|
||||||
|
<S d="96256" r="2" />
|
||||||
|
<S d="95232" />
|
||||||
|
<S d="96256" r="2" />
|
||||||
|
<S d="95232" />
|
||||||
|
<S d="96256" r="2" />
|
||||||
|
<S d="95232" />
|
||||||
|
<S d="96256" r="2" />
|
||||||
|
<S d="95232" />
|
||||||
|
<S d="96256" r="2" />
|
||||||
|
<S d="95232" />
|
||||||
|
<S d="96256" r="2" />
|
||||||
|
<S d="95232" />
|
||||||
|
<S d="96256" r="2" />
|
||||||
|
<S d="95232" />
|
||||||
|
<S d="96256" r="2" />
|
||||||
|
<S d="95232" />
|
||||||
|
<S d="96256" r="2" />
|
||||||
|
<S d="95232" />
|
||||||
|
<S d="96256" r="2" />
|
||||||
|
<S d="95232" />
|
||||||
|
<S d="96256" r="2" />
|
||||||
|
<S d="95232" />
|
||||||
|
<S d="96256" r="2" />
|
||||||
|
<S d="95232" />
|
||||||
|
<S d="96256" r="2" />
|
||||||
|
<S d="95232" />
|
||||||
|
<S d="96256" r="2" />
|
||||||
|
<S d="95232" />
|
||||||
|
<S d="96256" r="2" />
|
||||||
|
<S d="95232" />
|
||||||
|
<S d="96256" r="2" />
|
||||||
|
<S d="95232" />
|
||||||
|
<S d="96256" r="2" />
|
||||||
|
<S d="95232" />
|
||||||
|
<S d="96256" r="2" />
|
||||||
|
<S d="95232" />
|
||||||
|
<S d="96256" r="2" />
|
||||||
|
<S d="95232" />
|
||||||
|
<S d="96256" r="2" />
|
||||||
|
<S d="95232" />
|
||||||
|
<S d="96256" r="2" />
|
||||||
|
<S d="95232" />
|
||||||
|
<S d="96256" r="2" />
|
||||||
|
<S d="95232" />
|
||||||
|
<S d="96256" r="2" />
|
||||||
|
<S d="95232" />
|
||||||
|
<S d="96256" r="2" />
|
||||||
|
<S d="95232" />
|
||||||
|
<S d="96256" r="2" />
|
||||||
|
<S d="95232" />
|
||||||
|
<S d="96256" r="2" />
|
||||||
|
<S d="95232" />
|
||||||
|
<S d="96256" r="2" />
|
||||||
|
<S d="95232" />
|
||||||
|
<S d="96256" r="2" />
|
||||||
|
<S d="95232" />
|
||||||
|
<S d="96256" r="2" />
|
||||||
|
<S d="95232" />
|
||||||
|
<S d="96256" r="2" />
|
||||||
|
<S d="95232" />
|
||||||
|
<S d="96256" r="2" />
|
||||||
|
<S d="95232" />
|
||||||
|
<S d="96256" r="2" />
|
||||||
|
<S d="95232" />
|
||||||
|
<S d="96256" r="2" />
|
||||||
|
<S d="95232" />
|
||||||
|
<S d="96256" r="2" />
|
||||||
|
<S d="95232" />
|
||||||
|
<S d="96256" r="2" />
|
||||||
|
<S d="95232" />
|
||||||
|
<S d="96256" r="2" />
|
||||||
|
<S d="95232" />
|
||||||
|
<S d="96256" r="2" />
|
||||||
|
<S d="95232" />
|
||||||
|
<S d="96256" r="2" />
|
||||||
|
<S d="95232" />
|
||||||
|
<S d="96256" r="2" />
|
||||||
|
<S d="95232" />
|
||||||
|
<S d="96256" r="2" />
|
||||||
|
<S d="95232" />
|
||||||
|
<S d="96256" r="2" />
|
||||||
|
<S d="95232" />
|
||||||
|
<S d="96256" r="2" />
|
||||||
|
<S d="95232" />
|
||||||
|
<S d="96256" r="2" />
|
||||||
|
<S d="95232" />
|
||||||
|
<S d="96256" r="2" />
|
||||||
|
<S d="95232" />
|
||||||
|
<S d="96256" r="2" />
|
||||||
|
<S d="95232" />
|
||||||
|
<S d="96256" r="2" />
|
||||||
|
<S d="95232" />
|
||||||
|
<S d="96256" r="2" />
|
||||||
|
<S d="95232" />
|
||||||
|
<S d="96256" r="2" />
|
||||||
|
<S d="95232" />
|
||||||
|
<S d="96256" r="2" />
|
||||||
|
<S d="95232" />
|
||||||
|
<S d="96256" r="2" />
|
||||||
|
<S d="95232" />
|
||||||
|
<S d="96256" r="2" />
|
||||||
|
<S d="95232" />
|
||||||
|
<S d="96256" r="2" />
|
||||||
|
<S d="95232" />
|
||||||
|
<S d="96256" r="2" />
|
||||||
|
<S d="95232" />
|
||||||
|
<S d="96256" r="2" />
|
||||||
|
<S d="95232" />
|
||||||
|
<S d="96256" r="2" />
|
||||||
|
<S d="95232" />
|
||||||
|
<S d="96256" r="2" />
|
||||||
|
<S d="95232" />
|
||||||
|
<S d="96256" r="2" />
|
||||||
|
<S d="95232" />
|
||||||
|
<S d="96256" r="2" />
|
||||||
|
<S d="95232" />
|
||||||
|
<S d="96256" r="2" />
|
||||||
|
<S d="95232" />
|
||||||
|
<S d="96256" r="2" />
|
||||||
|
<S d="95232" />
|
||||||
|
<S d="96256" r="2" />
|
||||||
|
<S d="95232" />
|
||||||
|
<S d="96256" r="2" />
|
||||||
|
<S d="95232" />
|
||||||
|
<S d="96256" r="2" />
|
||||||
|
<S d="95232" />
|
||||||
|
<S d="96256" r="2" />
|
||||||
|
<S d="95232" />
|
||||||
|
<S d="96256" r="2" />
|
||||||
|
<S d="95232" />
|
||||||
|
<S d="96256" r="2" />
|
||||||
|
<S d="95232" />
|
||||||
|
<S d="96256" r="2" />
|
||||||
|
<S d="95232" />
|
||||||
|
<S d="96256" r="2" />
|
||||||
|
<S d="95232" />
|
||||||
|
<S d="96256" r="2" />
|
||||||
|
<S d="95232" />
|
||||||
|
<S d="96256" r="2" />
|
||||||
|
<S d="95232" />
|
||||||
|
<S d="96256" r="2" />
|
||||||
|
<S d="95232" />
|
||||||
|
<S d="96256" r="2" />
|
||||||
|
<S d="95232" />
|
||||||
|
<S d="96256" r="2" />
|
||||||
|
<S d="95232" />
|
||||||
|
<S d="96256" r="2" />
|
||||||
|
<S d="95232" />
|
||||||
|
<S d="96256" r="2" />
|
||||||
|
<S d="95232" />
|
||||||
|
<S d="96256" r="2" />
|
||||||
|
<S d="95232" />
|
||||||
|
<S d="96256" r="2" />
|
||||||
|
<S d="95232" />
|
||||||
|
<S d="96256" r="2" />
|
||||||
|
<S d="95232" />
|
||||||
|
<S d="96256" r="2" />
|
||||||
|
<S d="95232" />
|
||||||
|
<S d="96256" r="2" />
|
||||||
|
<S d="95232" />
|
||||||
|
<S d="96256" r="2" />
|
||||||
|
<S d="95232" />
|
||||||
|
<S d="96256" r="2" />
|
||||||
|
<S d="95232" />
|
||||||
|
<S d="96256" r="2" />
|
||||||
|
<S d="95232" />
|
||||||
|
<S d="96256" r="2" />
|
||||||
|
<S d="95232" />
|
||||||
|
<S d="96256" r="2" />
|
||||||
|
<S d="95232" />
|
||||||
|
<S d="96256" r="2" />
|
||||||
|
<S d="95232" />
|
||||||
|
<S d="96256" r="2" />
|
||||||
|
<S d="95232" />
|
||||||
|
<S d="96256" r="2" />
|
||||||
|
<S d="95232" />
|
||||||
|
<S d="96256" r="2" />
|
||||||
|
<S d="95232" />
|
||||||
|
<S d="96256" r="2" />
|
||||||
|
<S d="95232" />
|
||||||
|
<S d="96256" r="2" />
|
||||||
|
<S d="95232" />
|
||||||
|
<S d="96256" r="2" />
|
||||||
|
<S d="95232" />
|
||||||
|
<S d="96256" r="2" />
|
||||||
|
<S d="95232" />
|
||||||
|
<S d="96256" r="2" />
|
||||||
|
<S d="95232" />
|
||||||
|
<S d="96256" r="2" />
|
||||||
|
<S d="95232" />
|
||||||
|
<S d="96256" r="2" />
|
||||||
|
<S d="95232" />
|
||||||
|
<S d="96256" r="2" />
|
||||||
|
<S d="95232" />
|
||||||
|
<S d="96256" r="2" />
|
||||||
|
<S d="95232" />
|
||||||
|
<S d="96256" r="2" />
|
||||||
|
<S d="95232" />
|
||||||
|
<S d="96256" r="2" />
|
||||||
|
<S d="95232" />
|
||||||
|
<S d="96256" r="2" />
|
||||||
|
<S d="95232" />
|
||||||
|
<S d="96256" r="2" />
|
||||||
|
<S d="95232" />
|
||||||
|
<S d="96256" r="2" />
|
||||||
|
<S d="95232" />
|
||||||
|
<S d="96256" r="2" />
|
||||||
|
<S d="95232" />
|
||||||
|
<S d="96256" r="2" />
|
||||||
|
<S d="95232" />
|
||||||
|
<S d="96256" r="2" />
|
||||||
|
<S d="95232" />
|
||||||
|
<S d="96256" r="2" />
|
||||||
|
<S d="95232" />
|
||||||
|
<S d="96256" r="2" />
|
||||||
|
<S d="95232" />
|
||||||
|
<S d="3584" />
|
||||||
|
</SegmentTimeline>
|
||||||
|
</SegmentTemplate>
|
||||||
|
<Representation
|
||||||
|
id="audio=128001"
|
||||||
|
bandwidth="128001">
|
||||||
|
</Representation>
|
||||||
|
</AdaptationSet>
|
||||||
|
<AdaptationSet
|
||||||
|
id="2"
|
||||||
|
group="3"
|
||||||
|
contentType="text"
|
||||||
|
lang="en"
|
||||||
|
mimeType="application/mp4"
|
||||||
|
codecs="stpp"
|
||||||
|
startWithSAP="1">
|
||||||
|
<Role schemeIdUri="urn:mpeg:dash:role:2011" value="subtitle" />
|
||||||
|
<SegmentTemplate
|
||||||
|
timescale="1000"
|
||||||
|
initialization="3144-kZT4LWMQw6Rh7Kpd-$RepresentationID$.dash"
|
||||||
|
media="3144-kZT4LWMQw6Rh7Kpd-$RepresentationID$-$Time$.dash">
|
||||||
|
<SegmentTimeline>
|
||||||
|
<S t="0" d="60000" r="9" />
|
||||||
|
<S d="24000" />
|
||||||
|
</SegmentTimeline>
|
||||||
|
</SegmentTemplate>
|
||||||
|
<Representation
|
||||||
|
id="textstream_eng=1000"
|
||||||
|
bandwidth="1000">
|
||||||
|
</Representation>
|
||||||
|
</AdaptationSet>
|
||||||
|
<AdaptationSet
|
||||||
|
id="3"
|
||||||
|
group="2"
|
||||||
|
contentType="video"
|
||||||
|
par="960:409"
|
||||||
|
minBandwidth="100000"
|
||||||
|
maxBandwidth="4482000"
|
||||||
|
maxWidth="1689"
|
||||||
|
maxHeight="720"
|
||||||
|
segmentAlignment="true"
|
||||||
|
mimeType="video/mp4"
|
||||||
|
codecs="avc1.4D401F"
|
||||||
|
startWithSAP="1">
|
||||||
|
<Role schemeIdUri="urn:mpeg:dash:role:2011" value="main" />
|
||||||
|
<SegmentTemplate
|
||||||
|
timescale="12288"
|
||||||
|
initialization="3144-kZT4LWMQw6Rh7Kpd-$RepresentationID$.dash"
|
||||||
|
media="3144-kZT4LWMQw6Rh7Kpd-$RepresentationID$-$Time$.dash">
|
||||||
|
<SegmentTimeline>
|
||||||
|
<S t="0" d="24576" r="443" />
|
||||||
|
</SegmentTimeline>
|
||||||
|
</SegmentTemplate>
|
||||||
|
<Representation
|
||||||
|
id="video=100000"
|
||||||
|
bandwidth="100000"
|
||||||
|
width="336"
|
||||||
|
height="144"
|
||||||
|
sar="2880:2863"
|
||||||
|
scanType="progressive">
|
||||||
|
</Representation>
|
||||||
|
<Representation
|
||||||
|
id="video=326000"
|
||||||
|
bandwidth="326000"
|
||||||
|
width="562"
|
||||||
|
height="240"
|
||||||
|
sar="115200:114929"
|
||||||
|
scanType="progressive">
|
||||||
|
</Representation>
|
||||||
|
<Representation
|
||||||
|
id="video=698000"
|
||||||
|
bandwidth="698000"
|
||||||
|
width="844"
|
||||||
|
height="360"
|
||||||
|
sar="86400:86299"
|
||||||
|
scanType="progressive">
|
||||||
|
</Representation>
|
||||||
|
<Representation
|
||||||
|
id="video=1493000"
|
||||||
|
bandwidth="1493000"
|
||||||
|
width="1126"
|
||||||
|
height="480"
|
||||||
|
sar="230400:230267"
|
||||||
|
scanType="progressive">
|
||||||
|
</Representation>
|
||||||
|
<Representation
|
||||||
|
id="video=4482000"
|
||||||
|
bandwidth="4482000"
|
||||||
|
width="1688"
|
||||||
|
height="720"
|
||||||
|
sar="86400:86299"
|
||||||
|
scanType="progressive">
|
||||||
|
</Representation>
|
||||||
|
</AdaptationSet>
|
||||||
|
</Period>
|
||||||
|
</MPD>
|
@ -0,0 +1,32 @@
|
|||||||
|
<?xml version="1.0" ?>
|
||||||
|
<MPD xmlns="urn:mpeg:dash:schema:mpd:2011" profiles="urn:mpeg:dash:profile:isoff-live:2011" minBufferTime="PT10.01S" mediaPresentationDuration="PT30.097S" type="static">
|
||||||
|
<!-- Created with Bento4 mp4-dash.py, VERSION=2.0.0-639 -->
|
||||||
|
<Period>
|
||||||
|
<!-- Video -->
|
||||||
|
<AdaptationSet mimeType="video/mp4" segmentAlignment="true" startWithSAP="1" maxWidth="768" maxHeight="432">
|
||||||
|
<Representation id="video-avc1" codecs="avc1.4D401E" width="768" height="432" scanType="progressive" frameRate="30000/1001" bandwidth="699597">
|
||||||
|
<SegmentList timescale="1000" duration="10010">
|
||||||
|
<Initialization sourceURL="video-frag.mp4" range="36-746"/>
|
||||||
|
<SegmentURL media="video-frag.mp4" mediaRange="747-876117"/>
|
||||||
|
<SegmentURL media="video-frag.mp4" mediaRange="876118-1466913"/>
|
||||||
|
<SegmentURL media="video-frag.mp4" mediaRange="1466914-1953954"/>
|
||||||
|
<SegmentURL media="video-frag.mp4" mediaRange="1953955-1994652"/>
|
||||||
|
</SegmentList>
|
||||||
|
</Representation>
|
||||||
|
</AdaptationSet>
|
||||||
|
<!-- Audio -->
|
||||||
|
<AdaptationSet mimeType="audio/mp4" startWithSAP="1" segmentAlignment="true">
|
||||||
|
<Representation id="audio-und-mp4a.40.2" codecs="mp4a.40.2" bandwidth="98808" audioSamplingRate="48000">
|
||||||
|
<AudioChannelConfiguration schemeIdUri="urn:mpeg:mpegB:cicp:ChannelConfiguration" value="2"/>
|
||||||
|
<SegmentList timescale="1000" duration="10010">
|
||||||
|
<Initialization sourceURL="audio-frag.mp4" range="32-623"/>
|
||||||
|
<SegmentURL media="audio-frag.mp4" mediaRange="624-124199"/>
|
||||||
|
<SegmentURL media="audio-frag.mp4" mediaRange="124200-250303"/>
|
||||||
|
<SegmentURL media="audio-frag.mp4" mediaRange="250304-374365"/>
|
||||||
|
<SegmentURL media="audio-frag.mp4" mediaRange="374366-374836"/>
|
||||||
|
</SegmentList>
|
||||||
|
</Representation>
|
||||||
|
</AdaptationSet>
|
||||||
|
</Period>
|
||||||
|
</MPD>
|
||||||
|
|
File diff suppressed because it is too large
Load Diff
@ -0,0 +1,66 @@
|
|||||||
|
# coding: utf-8
|
||||||
|
from __future__ import unicode_literals
|
||||||
|
|
||||||
|
try:
|
||||||
|
import threading
|
||||||
|
except ImportError:
|
||||||
|
threading = None
|
||||||
|
|
||||||
|
from .common import FileDownloader
|
||||||
|
from ..downloader import get_suitable_downloader
|
||||||
|
from ..extractor.niconico import NiconicoIE
|
||||||
|
from ..utils import sanitized_Request
|
||||||
|
|
||||||
|
|
||||||
|
class NiconicoDmcFD(FileDownloader):
    """ Downloading niconico douga from DMC with heartbeat """

    FD_NAME = 'niconico_dmc'

    def real_download(self, filename, info_dict):
        # Download the DMC stream while periodically POSTing a "heartbeat"
        # so the server keeps the session (and the stream URL) alive for
        # the whole duration of the transfer.
        self.to_screen('[%s] Downloading from DMC' % self.FD_NAME)

        # Ask the niconico extractor for the heartbeat endpoint/payload
        # that must be pinged while the download is in progress.
        ie = NiconicoIE(self.ydl)
        info_dict, heartbeat_info_dict = ie._get_heartbeat_info(info_dict)

        # Delegate the actual transfer to whichever downloader suits the format.
        fd = get_suitable_downloader(info_dict, params=self.params)(self.ydl, self.params)
        for ph in self._progress_hooks:
            fd.add_progress_hook(ph)

        if not threading:
            # Without threads the heartbeat cannot run concurrently with the
            # download; fall back to a plain download (may fail for long videos
            # whose session expires mid-transfer).
            self.to_screen('[%s] Threading for Heartbeat not available' % self.FD_NAME)
            return fd.real_download(filename, info_dict)

        success = download_complete = False
        # 1-element list so the nested heartbeat() closure can replace the
        # currently armed threading.Timer in place.
        timer = [None]
        heartbeat_lock = threading.Lock()
        heartbeat_url = heartbeat_info_dict['url']
        heartbeat_data = heartbeat_info_dict['data'].encode()
        heartbeat_interval = heartbeat_info_dict.get('interval', 30)

        request = sanitized_Request(heartbeat_url, heartbeat_data)

        def heartbeat():
            # POST the keep-alive payload; a failed beat is reported but not
            # fatal — the download itself decides success.
            try:
                self.ydl.urlopen(request).read()
            except Exception:
                self.to_screen('[%s] Heartbeat failed' % self.FD_NAME)

            # Re-arm the timer unless the download has already finished.
            with heartbeat_lock:
                if not download_complete:
                    timer[0] = threading.Timer(heartbeat_interval, heartbeat)
                    timer[0].start()

        heartbeat_info_dict['ping']()
        self.to_screen('[%s] Heartbeat with %d second interval ...' % (self.FD_NAME, heartbeat_interval))
        try:
            # First beat runs synchronously, which guarantees timer[0] is set
            # before the download starts (so the cancel below is safe).
            heartbeat()
            if type(fd).__name__ == 'HlsFD':
                # HLS needs the manifest re-resolved into a concrete format dict.
                info_dict.update(ie._extract_m3u8_formats(info_dict['url'], info_dict['id'])[0])
            success = fd.real_download(filename, info_dict)
        finally:
            if heartbeat_lock:
                with heartbeat_lock:
                    # Stop the pending timer and prevent any in-flight beat
                    # from re-arming it.
                    timer[0].cancel()
                    download_complete = True
        return success
|
@ -0,0 +1,89 @@
|
|||||||
|
# coding: utf-8
|
||||||
|
from __future__ import unicode_literals
|
||||||
|
|
||||||
|
from .common import InfoExtractor
|
||||||
|
from ..utils import (
|
||||||
|
clean_html,
|
||||||
|
dict_get,
|
||||||
|
get_element_by_class,
|
||||||
|
int_or_none,
|
||||||
|
unified_strdate,
|
||||||
|
url_or_none,
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
class Alsace20TVIE(InfoExtractor):
    """Extractor for VOD pages on alsace20.tv (Alsace 20 regional TV)."""
    _VALID_URL = r'https?://(?:www\.)?alsace20\.tv/(?:[\w-]+/)+[\w-]+-(?P<id>[\w]+)'
    _TESTS = [{
        'url': 'https://www.alsace20.tv/VOD/Actu/JT/Votre-JT-jeudi-3-fevrier-lyNHCXpYJh.html',
        # 'md5': 'd91851bf9af73c0ad9b2cdf76c127fbb',
        'info_dict': {
            'id': 'lyNHCXpYJh',
            'ext': 'mp4',
            'description': 'md5:fc0bc4a0692d3d2dba4524053de4c7b7',
            'title': 'Votre JT du jeudi 3 février',
            'upload_date': '20220203',
            'thumbnail': r're:https?://.+\.jpg',
            'duration': 1073,
            'view_count': int,
        },
        'params': {
            'format': 'bestvideo',
        },
    }]

    def _extract_video(self, video_id, url=None):
        """Fetch metadata and formats for video_id from the site's JSON API.

        url, when given, is the original page URL; the page HTML is only used
        for supplementary metadata (description, thumbnail, duration).
        """
        info = self._download_json(
            'https://www.alsace20.tv/visionneuse/visio_v9_js.php?key=%s&habillage=0&mode=html' % (video_id, ),
            video_id) or {}
        title = info['titre']

        formats = []
        for res, fmt_url in (info.get('files') or {}).items():
            formats.extend(
                self._extract_smil_formats(fmt_url, video_id, fatal=False)
                if '/smil:_' in fmt_url
                else self._extract_mpd_formats(fmt_url, video_id, mpd_id=res, fatal=False))
        self._sort_formats(formats)

        webpage = (url and self._download_webpage(url, video_id, fatal=False)) or ''
        thumbnail = url_or_none(dict_get(info, ('image', 'preview', )) or self._og_search_thumbnail(webpage))
        # Fix: thumbnail may be None (no image in the API response and no
        # og:image in the, possibly empty, webpage); running _search_regex
        # over None would raise TypeError in re.search.
        upload_date = self._search_regex(r'/(\d{6})_', thumbnail, 'upload_date', default=None) if thumbnail else None
        # The thumbnail path encodes the date as YYMMDD; expand to a full date.
        upload_date = unified_strdate('20%s-%s-%s' % (upload_date[:2], upload_date[2:4], upload_date[4:])) if upload_date else None
        return {
            'id': video_id,
            'title': title,
            'formats': formats,
            'description': clean_html(get_element_by_class('wysiwyg', webpage)),
            'upload_date': upload_date,
            'thumbnail': thumbnail,
            'duration': int_or_none(self._og_search_property('video:duration', webpage) if webpage else None),
            'view_count': int_or_none(info.get('nb_vues')),
        }

    def _real_extract(self, url):
        video_id = self._match_id(url)
        return self._extract_video(video_id, url)
|
||||||
|
|
||||||
|
|
||||||
|
class Alsace20TVEmbedIE(Alsace20TVIE):
    """Extractor for alsace20.tv embed player URLs (/emb/<id>)."""
    _VALID_URL = r'https?://(?:www\.)?alsace20\.tv/emb/(?P<id>[\w]+)'
    _TESTS = [{
        'url': 'https://www.alsace20.tv/emb/lyNHCXpYJh',
        # 'md5': 'd91851bf9af73c0ad9b2cdf76c127fbb',
        'info_dict': {
            'id': 'lyNHCXpYJh',
            'ext': 'mp4',
            'title': 'Votre JT du jeudi 3 février',
            'upload_date': '20220203',
            'thumbnail': r're:https?://.+\.jpg',
            'view_count': int,
        },
        'params': {
            'format': 'bestvideo',
        },
    }]

    def _real_extract(self, url):
        # Embed pages carry no usable HTML metadata, so extract from the
        # JSON API only (no page URL is passed through).
        return self._extract_video(self._match_id(url))
|
@ -0,0 +1,101 @@
|
|||||||
|
# coding: utf-8
|
||||||
|
from __future__ import unicode_literals
|
||||||
|
|
||||||
|
from .common import InfoExtractor
|
||||||
|
from ..compat import (
|
||||||
|
compat_parse_qs,
|
||||||
|
compat_urllib_parse_urlparse,
|
||||||
|
)
|
||||||
|
from ..utils import (
|
||||||
|
float_or_none,
|
||||||
|
int_or_none,
|
||||||
|
parse_iso8601,
|
||||||
|
remove_start,
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
class ArnesIE(InfoExtractor):
    """Extractor for Arnes Video (video.arnes.si), the Slovenian academic
    network's video portal."""
    IE_NAME = 'video.arnes.si'
    IE_DESC = 'Arnes Video'
    _VALID_URL = r'https?://video\.arnes\.si/(?:[a-z]{2}/)?(?:watch|embed|api/(?:asset|public/video))/(?P<id>[0-9a-zA-Z]{12})'
    _TESTS = [{
        'url': 'https://video.arnes.si/watch/a1qrWTOQfVoU?t=10',
        'md5': '4d0f4d0a03571b33e1efac25fd4a065d',
        'info_dict': {
            'id': 'a1qrWTOQfVoU',
            'ext': 'mp4',
            'title': 'Linearna neodvisnost, definicija',
            'description': 'Linearna neodvisnost, definicija',
            'license': 'PRIVATE',
            'creator': 'Polona Oblak',
            'timestamp': 1585063725,
            'upload_date': '20200324',
            'channel': 'Polona Oblak',
            'channel_id': 'q6pc04hw24cj',
            'channel_url': 'https://video.arnes.si/?channel=q6pc04hw24cj',
            'duration': 596.75,
            'view_count': int,
            'tags': ['linearna_algebra'],
            'start_time': 10,
        }
    }, {
        'url': 'https://video.arnes.si/api/asset/s1YjnV7hadlC/play.mp4',
        'only_matching': True,
    }, {
        'url': 'https://video.arnes.si/embed/s1YjnV7hadlC',
        'only_matching': True,
    }, {
        'url': 'https://video.arnes.si/en/watch/s1YjnV7hadlC',
        'only_matching': True,
    }, {
        'url': 'https://video.arnes.si/embed/s1YjnV7hadlC?t=123&hideRelated=1',
        'only_matching': True,
    }, {
        'url': 'https://video.arnes.si/api/public/video/s1YjnV7hadlC',
        'only_matching': True,
    }]
    _BASE_URL = 'https://video.arnes.si'

    def _real_extract(self, url):
        video_id = self._match_id(url)

        # The public API returns all metadata under the 'data' key.
        video = self._download_json(
            self._BASE_URL + '/api/public/video/' + video_id, video_id)['data']
        title = video['title']

        formats = []
        for media in (video.get('media') or []):
            media_url = media.get('url')
            if not media_url:
                continue
            formats.append({
                'url': self._BASE_URL + media_url,
                # e.g. 'FORMAT_720P' -> '720P'
                'format_id': remove_start(media.get('format'), 'FORMAT_'),
                'format_note': media.get('formatTranslation'),
                'width': int_or_none(media.get('width')),
                'height': int_or_none(media.get('height')),
            })
        self._sort_formats(formats)

        channel = video.get('channel') or {}
        channel_id = channel.get('url')
        thumbnail = video.get('thumbnailUrl')

        return {
            'id': video_id,
            'title': title,
            'formats': formats,
            # Fix: thumbnailUrl may be absent; avoid TypeError on str + None.
            'thumbnail': self._BASE_URL + thumbnail if thumbnail else None,
            'description': video.get('description'),
            'license': video.get('license'),
            'creator': video.get('author'),
            'timestamp': parse_iso8601(video.get('creationTime')),
            'channel': channel.get('name'),
            'channel_id': channel_id,
            'channel_url': self._BASE_URL + '/?channel=' + channel_id if channel_id else None,
            # API reports milliseconds
            'duration': float_or_none(video.get('duration'), 1000),
            'view_count': int_or_none(video.get('views')),
            'tags': video.get('hashtags'),
            # ?t=NN in the URL is the requested start offset in seconds
            'start_time': int_or_none(compat_parse_qs(
                compat_urllib_parse_urlparse(url).query).get('t', [None])[0]),
        }
|
@ -0,0 +1,37 @@
|
|||||||
|
# coding: utf-8
|
||||||
|
from __future__ import unicode_literals
|
||||||
|
|
||||||
|
from .brightcove import BrightcoveNewIE
|
||||||
|
from ..utils import extract_attributes
|
||||||
|
|
||||||
|
|
||||||
|
class BandaiChannelIE(BrightcoveNewIE):
    """Extractor for Bandai Channel (b-ch.com) titles, which are delivered
    through Brightcove."""
    IE_NAME = 'bandaichannel'
    _VALID_URL = r'https?://(?:www\.)?b-ch\.com/titles/(?P<id>\d+/\d+)'
    _TESTS = [{
        'url': 'https://www.b-ch.com/titles/514/001',
        'md5': 'a0f2d787baa5729bed71108257f613a4',
        'info_dict': {
            'id': '6128044564001',
            'ext': 'mp4',
            'title': 'メタルファイターMIKU 第1話',
            'timestamp': 1580354056,
            'uploader_id': '5797077852001',
            'upload_date': '20200130',
            'duration': 1387.733,
        },
        'params': {
            'format': 'bestvideo',
            'skip_download': True,
        },
    }]

    def _real_extract(self, url):
        video_id = self._match_id(url)
        webpage = self._download_webpage(url, video_id)
        # The page embeds a <video-js> tag whose data-* attributes carry the
        # playback-info key ('data-info') and the API token ('data-auth').
        player_tag = self._search_regex(
            r'(<video-js[^>]+\bid="bcplayer"[^>]*>)', webpage, 'player')
        player_attrs = extract_attributes(player_tag)
        playback_url = 'https://pbifcd.b-ch.com/v1/playbackinfo/ST/70/' + player_attrs['data-info']
        api_headers = {'X-API-KEY': player_attrs['data-auth'].strip()}
        # The playback-info endpoint wraps Brightcove metadata under 'bc'.
        bc = self._download_json(playback_url, video_id, headers=api_headers)['bc']
        return self._parse_brightcove_metadata(bc, bc['id'])
|
@ -0,0 +1,59 @@
|
|||||||
|
# coding: utf-8
|
||||||
|
from __future__ import unicode_literals
|
||||||
|
|
||||||
|
from .common import InfoExtractor
|
||||||
|
from ..utils import ExtractorError, urlencode_postdata
|
||||||
|
|
||||||
|
|
||||||
|
class BigoIE(InfoExtractor):
    """Extractor for live streams on bigo.tv."""
    _VALID_URL = r'https?://(?:www\.)?bigo\.tv/(?:[a-z]{2,}/)?(?P<id>[^/]+)'

    _TESTS = [{
        'url': 'https://www.bigo.tv/ja/221338632',
        'info_dict': {
            'id': '6576287577575737440',
            'title': '土よ〜💁‍♂️ 休憩室/REST room',
            'thumbnail': r're:https?://.+',
            'uploader': '✨Shin💫',
            'uploader_id': '221338632',
            'is_live': True,
        },
        'skip': 'livestream',
    }, {
        'url': 'https://www.bigo.tv/th/Tarlerm1304',
        'only_matching': True,
    }, {
        'url': 'https://bigo.tv/115976881',
        'only_matching': True,
    }]

    def _real_extract(self, url):
        user_id = self._match_id(url)

        # The studio endpoint returns {code, msg, data} for a given user.
        response = self._download_json(
            'https://bigo.tv/studio/getInternalStudioInfo',
            user_id, data=urlencode_postdata({'siteId': user_id}))

        if not isinstance(response, dict):
            raise ExtractorError('Received invalid JSON data')
        error_code = response.get('code')
        if error_code:
            raise ExtractorError(
                'Bigo says: %s (code %s)' % (response.get('msg'), error_code), expected=True)
        stream = response.get('data') or {}

        if not stream.get('alive'):
            raise ExtractorError('This user is offline.', expected=True)

        # Only an HLS source is exposed for live rooms.
        hls_format = {
            'url': stream.get('hls_src'),
            'ext': 'mp4',
            'protocol': 'm3u8',
        }
        return {
            'id': stream.get('roomId') or user_id,
            'title': stream.get('roomTopic') or stream.get('nick_name') or user_id,
            'formats': [hls_format],
            'thumbnail': stream.get('snapshot'),
            'uploader': stream.get('nick_name'),
            'uploader_id': user_id,
            'is_live': True,
        }
|
@ -0,0 +1,173 @@
|
|||||||
|
# coding: utf-8
|
||||||
|
from __future__ import unicode_literals
|
||||||
|
|
||||||
|
import json
|
||||||
|
|
||||||
|
from ..utils import (
|
||||||
|
strip_or_none,
|
||||||
|
traverse_obj,
|
||||||
|
)
|
||||||
|
from .common import InfoExtractor
|
||||||
|
|
||||||
|
|
||||||
|
class BlerpIE(InfoExtractor):
    # Extractor for individual soundbites hosted on blerp.com, fetched via
    # the site's public GraphQL API.
    IE_NAME = 'blerp'
    _VALID_URL = r'https?://(?:www\.)?blerp\.com/soundbites/(?P<id>[0-9a-zA-Z]+)'
    _TESTS = [{
        'url': 'https://blerp.com/soundbites/6320fe8745636cb4dd677a5a',
        'info_dict': {
            'id': '6320fe8745636cb4dd677a5a',
            'title': 'Samsung Galaxy S8 Over the Horizon Ringtone 2016',
            'uploader': 'luminousaj',
            'uploader_id': '5fb81e51aa66ae000c395478',
            'ext': 'mp3',
            'tags': ['samsung', 'galaxy', 's8', 'over the horizon', '2016', 'ringtone'],
        }
    }, {
        'url': 'https://blerp.com/soundbites/5bc94ef4796001000498429f',
        'info_dict': {
            'id': '5bc94ef4796001000498429f',
            'title': 'Yee',
            'uploader': '179617322678353920',
            'uploader_id': '5ba99cf71386730004552c42',
            'ext': 'mp3',
            'tags': ['YEE', 'YEET', 'wo ha haah catchy tune yee', 'yee']
        }
    }]

    # GraphQL operation name and query sent to the API.  The query requests
    # far more fields than are consumed below; it is kept verbatim so the
    # request mirrors what the web client sends.
    _GRAPHQL_OPERATIONNAME = "webBitePageGetBite"
    _GRAPHQL_QUERY = (
        '''query webBitePageGetBite($_id: MongoID!) {
            web {
                biteById(_id: $_id) {
                    ...bitePageFrag
                    __typename
                }
                __typename
            }
        }

        fragment bitePageFrag on Bite {
            _id
            title
            userKeywords
            keywords
            color
            visibility
            isPremium
            owned
            price
            extraReview
            isAudioExists
            image {
                filename
                original {
                    url
                    __typename
                }
                __typename
            }
            userReactions {
                _id
                reactions
                createdAt
                __typename
            }
            topReactions
            totalSaveCount
            saved
            blerpLibraryType
            license
            licenseMetaData
            playCount
            totalShareCount
            totalFavoriteCount
            totalAddedToBoardCount
            userCategory
            userAudioQuality
            audioCreationState
            transcription
            userTranscription
            description
            createdAt
            updatedAt
            author
            listingType
            ownerObject {
                _id
                username
                profileImage {
                    filename
                    original {
                        url
                        __typename
                    }
                    __typename
                }
                __typename
            }
            transcription
            favorited
            visibility
            isCurated
            sourceUrl
            audienceRating
            strictAudienceRating
            ownerId
            reportObject {
                reportedContentStatus
                __typename
            }
            giphy {
                mp4
                gif
                __typename
            }
            audio {
                filename
                original {
                    url
                    __typename
                }
                mp3 {
                    url
                    __typename
                }
                __typename
            }
            __typename
        }

        ''')

    def _real_extract(self, url):
        audio_id = self._match_id(url)

        # GraphQL POST body: operation name, query text and the bite id.
        data = {
            'operationName': self._GRAPHQL_OPERATIONNAME,
            'query': self._GRAPHQL_QUERY,
            'variables': {
                '_id': audio_id
            }
        }

        headers = {
            'Content-Type': 'application/json'
        }

        json_result = self._download_json('https://api.blerp.com/graphql',
            audio_id, data=json.dumps(data).encode('utf-8'), headers=headers)

        # Navigate to the bite object; KeyError here surfaces as an
        # extraction failure if the API response shape changes.
        bite_json = json_result['data']['web']['biteById']

        info_dict = {
            'id': bite_json['_id'],
            'url': bite_json['audio']['mp3']['url'],
            'title': bite_json['title'],
            'uploader': traverse_obj(bite_json, ('ownerObject', 'username'), expected_type=strip_or_none),
            'uploader_id': traverse_obj(bite_json, ('ownerObject', '_id'), expected_type=strip_or_none),
            'ext': 'mp3',
            # Keep only non-empty keywords after whitespace-stripping each one.
            'tags': list(filter(None, map(strip_or_none, (traverse_obj(bite_json, 'userKeywords', expected_type=list) or []))) or None)
        }

        return info_dict
|
@ -1,86 +0,0 @@
|
|||||||
from __future__ import unicode_literals
|
|
||||||
|
|
||||||
import json
|
|
||||||
|
|
||||||
from .common import InfoExtractor
|
|
||||||
from ..utils import (
|
|
||||||
remove_start,
|
|
||||||
int_or_none,
|
|
||||||
)
|
|
||||||
|
|
||||||
|
|
||||||
class BlinkxIE(InfoExtractor):
    """Extractor for blinkx.com video pages and `blinkx:<id>` URLs."""
    # NOTE(review): 'www.' is mandatory in this pattern — non-www page URLs
    # will not match; confirm whether that is intended.
    _VALID_URL = r'(?:https?://(?:www\.)blinkx\.com/#?ce/|blinkx:)(?P<id>[^?]+)'
    IE_NAME = 'blinkx'

    _TEST = {
        'url': 'http://www.blinkx.com/ce/Da0Gw3xc5ucpNduzLuDDlv4WC9PuI4fDi1-t6Y3LyfdY2SZS5Urbvn-UPJvrvbo8LTKTc67Wu2rPKSQDJyZeeORCR8bYkhs8lI7eqddznH2ofh5WEEdjYXnoRtj7ByQwt7atMErmXIeYKPsSDuMAAqJDlQZ-3Ff4HJVeH_s3Gh8oQ',
        'md5': '337cf7a344663ec79bf93a526a2e06c7',
        'info_dict': {
            'id': 'Da0Gw3xc',
            'ext': 'mp4',
            'title': 'No Daily Show for John Oliver; HBO Show Renewed - IGN News',
            'uploader': 'IGN News',
            'upload_date': '20150217',
            'timestamp': 1424215740,
            'description': 'HBO has renewed Last Week Tonight With John Oliver for two more seasons.',
            'duration': 47.743333,
        },
    }

    def _real_extract(self, url):
        video_id = self._match_id(url)
        # The site keys pages on the first 8 characters of the full id.
        display_id = video_id[:8]

        api_url = ('https://apib4.blinkx.com/api.php?action=play_video&'
                   + 'video=%s' % video_id)
        # Idiom fix: use _download_json instead of the old
        # _download_webpage + json.loads pair — same result, with the
        # extractor framework's consistent decoding and error reporting.
        data = self._download_json(api_url, display_id)['api']['results'][0]
        duration = None
        thumbnails = []
        formats = []
        for m in data['media']:
            if m['type'] == 'jpg':
                thumbnails.append({
                    'url': m['link'],
                    'width': int(m['w']),
                    'height': int(m['h']),
                })
            elif m['type'] == 'original':
                duration = float(m['d'])
            elif m['type'] == 'youtube':
                # Some entries are just YouTube mirrors: delegate entirely.
                yt_id = m['link']
                self.to_screen('Youtube video detected: %s' % yt_id)
                return self.url_result(yt_id, 'Youtube', video_id=yt_id)
            elif m['type'] in ('flv', 'mp4'):
                # Codec names come prefixed with 'ff' (ffmpeg naming).
                vcodec = remove_start(m['vcodec'], 'ff')
                acodec = remove_start(m['acodec'], 'ff')
                # Bitrates are reported in bit/s; scale down to kbit/s.
                vbr = int_or_none(m.get('vbr') or m.get('vbitrate'), 1000)
                abr = int_or_none(m.get('abr') or m.get('abitrate'), 1000)
                tbr = vbr + abr if vbr and abr else None
                format_id = '%s-%sk-%s' % (vcodec, tbr, m['w'])
                formats.append({
                    'format_id': format_id,
                    'url': m['link'],
                    'vcodec': vcodec,
                    'acodec': acodec,
                    'abr': abr,
                    'vbr': vbr,
                    'tbr': tbr,
                    'width': int_or_none(m.get('w')),
                    'height': int_or_none(m.get('h')),
                })

        self._sort_formats(formats)

        return {
            'id': display_id,
            'fullid': video_id,
            'title': data['title'],
            'formats': formats,
            'uploader': data['channel_name'],
            'timestamp': data['pubdate_epoch'],
            'description': data.get('description'),
            'thumbnails': thumbnails,
            'duration': duration,
        }
|
|
@ -0,0 +1,79 @@
|
|||||||
|
# coding: utf-8
|
||||||
|
from __future__ import unicode_literals
|
||||||
|
|
||||||
|
from .common import InfoExtractor
|
||||||
|
from ..utils import (
|
||||||
|
determine_ext,
|
||||||
|
int_or_none,
|
||||||
|
merge_dicts,
|
||||||
|
parse_iso8601,
|
||||||
|
T,
|
||||||
|
traverse_obj,
|
||||||
|
txt_or_none,
|
||||||
|
urljoin,
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
class CaffeineTVIE(InfoExtractor):
    """Extract VOD broadcasts from caffeine.tv."""
    _VALID_URL = r'https?://(?:www\.)?caffeine\.tv/[^/]+/video/(?P<id>[0-9a-f-]+)'
    _TESTS = [{
        'url': 'https://www.caffeine.tv/TsuSurf/video/cffc0a00-e73f-11ec-8080-80017d29f26e',
        'info_dict': {
            'id': 'cffc0a00-e73f-11ec-8080-80017d29f26e',
            'ext': 'mp4',
            'title': 'GOOOOD MORNINNNNN #highlights',
            'timestamp': 1654702180,
            'upload_date': '20220608',
            'uploader': 'TsuSurf',
            'duration': 3145,
            'age_limit': 17,
        },
        'params': {
            'format': 'bestvideo',
        },
    }]

    def _real_extract(self, url):
        video_id = self._match_id(url)
        activity = self._download_json(
            'https://api.caffeine.tv/social/public/activity/' + video_id,
            video_id)
        bcast_info = traverse_obj(activity, ('broadcast_info', T(dict))) or {}
        # Title and stream URL are mandatory: fail loudly if missing.
        title = bcast_info['broadcast_title']
        playback_url = bcast_info['video_url']

        if determine_ext(playback_url) == 'm3u8':
            formats = self._extract_m3u8_formats(
                playback_url, video_id, 'mp4', entry_protocol='m3u8',
                fatal=False)
        else:
            # Plain progressive download.
            formats = [{'url': playback_url}]
        self._sort_formats(formats)

        # Assume Apple Store ratings [1]
        # 1. https://en.wikipedia.org/wiki/Mobile_software_content_rating_system
        rating_map = {
            'FOUR_PLUS': 0,
            'NINE_PLUS': 9,
            'TWELVE_PLUS': 12,
            'SEVENTEEN_PLUS': 17,
        }

        # merge_dicts keeps the first non-empty value for each key, so the
        # dicts below act as ordered fallbacks.
        return merge_dicts({
            'id': video_id,
            'title': title,
            'formats': formats,
        }, traverse_obj(activity, {
            'uploader': ((None, 'user'), 'username'),
        }, get_all=False), traverse_obj(activity, {
            'like_count': ('like_count', T(int_or_none)),
            'view_count': ('view_count', T(int_or_none)),
            'comment_count': ('comment_count', T(int_or_none)),
            'tags': ('tags', Ellipsis, T(txt_or_none)),
            'is_live': 'is_live',
            'uploader': ('user', 'name'),
        }), traverse_obj(bcast_info, {
            'duration': ('content_duration', T(int_or_none)),
            'timestamp': ('broadcast_start_time', T(parse_iso8601)),
            'thumbnail': ('preview_image_path', T(lambda u: urljoin(url, u))),
            'age_limit': ('content_rating', T(
                lambda r: r and rating_map.get(r, 17))),
        }))
|
@ -1,38 +1,113 @@
|
|||||||
from __future__ import unicode_literals
|
from __future__ import unicode_literals
|
||||||
|
|
||||||
from .cbs import CBSBaseIE
|
import re
|
||||||
|
|
||||||
|
# from .cbs import CBSBaseIE
|
||||||
|
from .common import InfoExtractor
|
||||||
|
from ..utils import (
|
||||||
|
int_or_none,
|
||||||
|
try_get,
|
||||||
|
)
|
||||||
|
|
||||||
class CBSSportsEmbedIE(InfoExtractor):
    """Extract videos from the CBS Sports / 247Sports embedded player."""
    IE_NAME = 'cbssports:embed'
    _VALID_URL = r'''(?ix)https?://(?:(?:www\.)?cbs|embed\.247)sports\.com/player/embed.+?
        (?:
            ids%3D(?P<id>[\da-f]{8}-(?:[\da-f]{4}-){3}[\da-f]{12})|
            pcid%3D(?P<pcid>\d+)
        )'''
    _TESTS = [{
        'url': 'https://www.cbssports.com/player/embed/?args=player_id%3Db56c03a6-231a-4bbe-9c55-af3c8a8e9636%26ids%3Db56c03a6-231a-4bbe-9c55-af3c8a8e9636%26resizable%3D1%26autoplay%3Dtrue%26domain%3Dcbssports.com%26comp_ads_enabled%3Dfalse%26watchAndRead%3D0%26startTime%3D0%26env%3Dprod',
        'only_matching': True,
    }, {
        'url': 'https://embed.247sports.com/player/embed/?args=%3fplayer_id%3d1827823171591%26channel%3dcollege-football-recruiting%26pcid%3d1827823171591%26width%3d640%26height%3d360%26autoplay%3dTrue%26comp_ads_enabled%3dFalse%26uvpc%3dhttps%253a%252f%252fwww.cbssports.com%252fapi%252fcontent%252fvideo%252fconfig%252f%253fcfg%253duvp_247sports_v4%2526partner%253d247%26uvpc_m%3dhttps%253a%252f%252fwww.cbssports.com%252fapi%252fcontent%252fvideo%252fconfig%252f%253fcfg%253duvp_247sports_m_v4%2526partner_m%253d247_mobile%26utag%3d247sportssite%26resizable%3dTrue',
        'only_matching': True,
    }]

    def _real_extract(self, url):
        # The embed URL carries either a UUID ('ids') or a numeric 'pcid'.
        uuid, pcid = re.match(self._VALID_URL, url).groups()
        query = {'id': uuid} if uuid else {'pcid': pcid}
        video = self._download_json(
            'https://www.cbssports.com/api/content/video/',
            uuid or pcid, query=query)[0]
        video_id = video['id']
        title = video['title']
        metadata = video.get('metaData') or {}

        formats = self._extract_m3u8_formats(
            metadata['files'][0]['url'], video_id, 'mp4',
            'm3u8_native', m3u8_id='hls', fatal=False)
        self._sort_formats(formats)

        thumbnails = None
        image_info = video.get('image') or {}
        image_path = image_info.get('path')
        if image_path:
            thumbnails = [{
                'url': image_path,
                'width': int_or_none(image_info.get('width')),
                'height': int_or_none(image_info.get('height')),
                'filesize': int_or_none(image_info.get('size')),
            }]

        return {
            'id': video_id,
            'title': title,
            'formats': formats,
            'thumbnails': thumbnails,
            'description': video.get('description'),
            'timestamp': int_or_none(try_get(video, lambda x: x['dateCreated']['epoch'])),
            'duration': int_or_none(metadata.get('duration')),
        }
|
||||||
|
|
||||||
|
|
||||||
|
class CBSSportsBaseIE(InfoExtractor):
    """Shared page handler for CBS Sports-family sites.

    Locates the embedded player iframe on the article page and delegates
    extraction to CBSSportsEmbedIE.
    """
    def _real_extract(self, url):
        display_id = self._match_id(url)
        webpage = self._download_webpage(url, display_id)
        embed_url = self._search_regex(
            r'<iframe[^>]+(?:data-)?src="(https?://[^/]+/player/embed[^"]+)"',
            webpage, 'embed url')
        return self.url_result(embed_url, CBSSportsEmbedIE.ie_key())
|
||||||
|
|
||||||
|
|
||||||
|
class CBSSportsIE(CBSSportsBaseIE):
    """cbssports.com article/video pages (extraction via the embed player)."""
    IE_NAME = 'cbssports'
    _VALID_URL = r'https?://(?:www\.)?cbssports\.com/[^/]+/video/(?P<id>[^/?#&]+)'
    _TESTS = [{
        'url': 'https://www.cbssports.com/college-football/video/cover-3-stanford-spring-gleaning/',
        'info_dict': {
            'id': 'b56c03a6-231a-4bbe-9c55-af3c8a8e9636',
            'ext': 'mp4',
            'title': 'Cover 3: Stanford Spring Gleaning',
            'description': 'The Cover 3 crew break down everything you need to know about the Stanford Cardinal this spring.',
            'timestamp': 1617218398,
            'upload_date': '20210331',
            'duration': 502,
        },
    }]
|
||||||
|
|
||||||
|
|
||||||
|
class TwentyFourSevenSportsIE(CBSSportsBaseIE):
    """247sports.com video pages (extraction via the shared embed player)."""
    IE_NAME = '247sports'
    _VALID_URL = r'https?://(?:www\.)?247sports\.com/Video/(?:[^/?#&]+-)?(?P<id>\d+)'
    _TESTS = [{
        'url': 'https://247sports.com/Video/2021-QB-Jake-Garcia-senior-highlights-through-five-games-10084854/',
        'info_dict': {
            'id': '4f1265cb-c3b5-44a8-bb1d-1914119a0ccc',
            'ext': 'mp4',
            'title': '2021 QB Jake Garcia senior highlights through five games',
            'description': 'md5:8cb67ebed48e2e6adac1701e0ff6e45b',
            'timestamp': 1607114223,
            'upload_date': '20201204',
            'duration': 208,
        },
    }]
|
||||||
|
@ -0,0 +1,69 @@
|
|||||||
|
# coding: utf-8
|
||||||
|
from __future__ import unicode_literals
|
||||||
|
|
||||||
|
from .common import InfoExtractor
|
||||||
|
from ..compat import compat_str
|
||||||
|
from ..utils import (
|
||||||
|
ExtractorError,
|
||||||
|
merge_dicts,
|
||||||
|
T,
|
||||||
|
traverse_obj,
|
||||||
|
unified_timestamp,
|
||||||
|
url_or_none,
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
class ClipchampIE(InfoExtractor):
    """Extract Cloudflare Stream-hosted videos from clipchamp.com watch pages."""
    _VALID_URL = r'https?://(?:www\.)?clipchamp\.com/watch/(?P<id>[\w-]+)'
    _TESTS = [{
        'url': 'https://clipchamp.com/watch/gRXZ4ZhdDaU',
        'info_dict': {
            'id': 'gRXZ4ZhdDaU',
            'ext': 'mp4',
            'title': 'Untitled video',
            'uploader': 'Alexander Schwartz',
            'timestamp': 1680805580,
            'upload_date': '20230406',
            'thumbnail': r're:^https?://.+\.jpg',
        },
        'params': {
            'skip_download': 'm3u8',
            'format': 'bestvideo',
        },
    }]

    _STREAM_URL_TMPL = 'https://%s.cloudflarestream.com/%s/manifest/video.%s'
    _STREAM_URL_QUERY = {'parentOrigin': 'https://clipchamp.com'}

    def _real_extract(self, url):
        video_id = self._match_id(url)
        webpage = self._download_webpage(url, video_id)
        video_data = self._search_nextjs_data(
            webpage, video_id)['props']['pageProps']['video']

        # Only Cloudflare Stream-backed clips are handled here.
        storage = video_data.get('storage_location')
        if storage != 'cf_stream':
            raise ExtractorError('Unsupported clip storage location "%s"' % (storage,))

        stream_path = video_data['download_url']
        player_page = self._download_webpage(
            'https://iframe.cloudflarestream.com/' + stream_path, video_id,
            'Downloading player iframe')
        # The customer subdomain is embedded in the player markup; fall back
        # to the known production prefix when the attribute is absent.
        subdomain = self._search_regex(
            r'''\bcustomer-domain-prefix\s*=\s*("|')(?P<sd>[\w-]+)\1''',
            player_page, 'subdomain', group='sd',
            fatal=False) or 'customer-2ut9yn3y6fta1yxe'

        # Fetch both DASH and HLS manifests; either may be unavailable.
        formats = self._extract_mpd_formats(
            self._STREAM_URL_TMPL % (subdomain, stream_path, 'mpd'), video_id,
            query=self._STREAM_URL_QUERY, fatal=False, mpd_id='dash')
        formats.extend(self._extract_m3u8_formats(
            self._STREAM_URL_TMPL % (subdomain, stream_path, 'm3u8'), video_id,
            'mp4', query=self._STREAM_URL_QUERY, fatal=False, m3u8_id='hls'))

        uploader_name = ' '.join(traverse_obj(
            video_data, ('creator', ('first_name', 'last_name'), T(compat_str)))) or None

        return merge_dicts({
            'id': video_id,
            'formats': formats,
            'uploader': uploader_name,
        }, traverse_obj(video_data, {
            'title': ('project', 'project_name', T(compat_str)),
            'timestamp': ('created_at', T(unified_timestamp)),
            'thumbnail': ('thumbnail_url', T(url_or_none)),
        }), rev=True)
|
File diff suppressed because it is too large
Load Diff
Some files were not shown because too many files have changed in this diff Show More
Loading…
Reference in New Issue