Compare commits
1080 Commits
2020.11.12
...
master
Author | SHA1 | Date |
---|---|---|
dirkf | c5098961b0 | 2 months ago |
dirkf | dbc08fba83 | 2 months ago |
Aiur Adept | 71223bff39 | 2 months ago |
dirkf | e1b3fa242c | 2 months ago |
dirkf | 451046d62a | 2 months ago |
dirkf | 16f5bbc464 | 3 months ago |
dirkf | d35ce6ce95 | 3 months ago |
dirkf | 76ac69917e | 3 months ago |
dirkf | 756f6b45c7 | 3 months ago |
bashonly | 43a74c5fa5 | 3 months ago |
dirkf | a452f9437c | 3 months ago |
unkernet | 36801c62df | 3 months ago |
Sergey Musatov | f4b47754d9 | 3 months ago |
dirkf | 37cea84f77 | 3 months ago |
dirkf | 4652109643 | 3 months ago |
dirkf | 3c466186a8 | 3 months ago |
dirkf | 4d05f84325 | 3 months ago |
dirkf | e0094e63c3 | 3 months ago |
dirkf | fd8242e3ef | 3 months ago |
dirkf | ad01fa6cca | 3 months ago |
dirkf | 2eac0fa379 | 3 months ago |
Paper | 0153b387e5 | 4 months ago |
dirkf | a48fe7491d | 4 months ago |
dirkf | e20ca543f0 | 4 months ago |
dirkf | e39466051f | 4 months ago |
dirkf | d95c0d203f | 4 months ago |
dirkf | 3bde6a5752 | 4 months ago |
dirkf | 50f6c5668a | 4 months ago |
dirkf | b4ff08bd2d | 4 months ago |
kmnx | 88bd8b9f87 | 4 months ago |
dirkf | 21924742f7 | 4 months ago |
dirkf | 768ccccd9b | 4 months ago |
dirkf | eee9a247eb | 4 months ago |
dirkf | 34484e49f5 | 4 months ago |
dirkf | 06da64ee51 | 4 months ago |
dirkf | a08f2b7e45 | 5 months ago |
dirkf | 668332b973 | 5 months ago |
dirkf | 0b2ce3685e | 5 months ago |
dirkf | c2766cb80e | 5 months ago |
dirkf | eb38665438 | 5 months ago |
dirkf | e0727e4ab6 | 6 months ago |
Ori Avtalion | 4ea59c6107 | 6 months ago |
dirkf | 21792b88b7 | 6 months ago |
dirkf | d8f134a664 | 6 months ago |
dirkf | 31a15a7c8d | 6 months ago |
dirkf | 19dc10b986 | 6 months ago |
dirkf | 182f63e82a | 6 months ago |
gy-chen | 71211e7db7 | 6 months ago |
Zizheng Guo | a96a45b2cd | 7 months ago |
hatsomatt | 820fae3b3a | 7 months ago |
dirkf | aef24d97e9 | 7 months ago |
dirkf | f7b30e3f73 | 7 months ago |
dirkf | f66372403f | 7 months ago |
dirkf | 7216fa2ac4 | 7 months ago |
dirkf | acc383b9e3 | 7 months ago |
Hubert Hirtz | f0812d7848 | 7 months ago |
Aaron Tan | 40bd5c1815 | 7 months ago |
dirkf | 70f230f9cf | 7 months ago |
dirkf | 48ddab1f3a | 7 months ago |
dirkf | 7687389f08 | 7 months ago |
dirkf | 4416f82c80 | 8 months ago |
dirkf | bdda6b81df | 8 months ago |
dirkf | 1fd8f802b8 | 8 months ago |
dirkf | 4eaeb9b2c6 | 8 months ago |
dirkf | bec9180e89 | 8 months ago |
dirkf | c58b655a9e | 8 months ago |
dirkf | dc512e3a8a | 8 months ago |
dirkf | f8b0135850 | 8 months ago |
dirkf | 640d39f03a | 8 months ago |
dirkf | 6651871416 | 8 months ago |
mk-pmb | be008e657d | 10 months ago |
Robotix | b1bbc1e502 | 10 months ago |
dirkf | 55a442adae | 10 months ago |
mimvahedi | c62936a5f2 | 10 months ago |
dirkf | 427472351c | 10 months ago |
dirkf | c6538ed323 | 10 months ago |
dirkf | 8d227cb97b | 10 months ago |
dirkf | 4e115e18cb | 10 months ago |
ReenigneArcher | b7fca0fab3 | 10 months ago |
dirkf | 00ef748cc0 | 1 year ago |
dirkf | 66ab0814c4 | 1 year ago |
dirkf | bbd3e7e999 | 1 year ago |
dirkf | 21caaf2380 | 1 year ago |
dirkf | 31f50c8194 | 1 year ago |
dirkf | 7d58f0769a | 1 year ago |
dirkf | 86e3cf5e58 | 1 year ago |
dirkf | 2efc8de4d2 | 1 year ago |
dirkf | e4178b5af3 | 1 year ago |
dirkf | 2d2a4bc832 | 1 year ago |
dirkf | 7d965e6b65 | 1 year ago |
dirkf | abef53466d | 1 year ago |
dirkf | e7926ae9f4 | 1 year ago |
dirkf | 87e578c9b8 | 1 year ago |
dirkf | 0861812d72 | 1 year ago |
dirkf | b870181229 | 1 year ago |
dirkf | a25e9f3c84 | 1 year ago |
dirkf | aac33155e4 | 1 year ago |
dirkf | 2b7dd3b2a2 | 1 year ago |
dirkf | 44faa71b19 | 1 year ago |
dirkf | 7bce2ad441 | 1 year ago |
dirkf | ca71e56c48 | 1 year ago |
dirkf | 2a4e9faa77 | 1 year ago |
dirkf | 74eef6bb5e | 1 year ago |
dirkf | 1fa8b86f0b | 1 year ago |
dirkf | b2ba24bb02 | 1 year ago |
dirkf | a190b55964 | 1 year ago |
dirkf | b2741f2654 | 1 year ago |
dirkf | 8465222041 | 1 year ago |
dirkf | 4339910df3 | 1 year ago |
dirkf | eaaf4c6736 | 1 year ago |
dirkf | 4566e6e53e | 1 year ago |
dirkf | 1e8ccdd2eb | 1 year ago |
dirkf | cb9366eda5 | 1 year ago |
dirkf | d9d07a9581 | 1 year ago |
dirkf | 825a40744b | 1 year ago |
dirkf | 47214e46d8 | 1 year ago |
dirkf | 1d8d5a93f7 | 1 year ago |
dirkf | 1634b1d61e | 1 year ago |
bashonly | 21438a4194 | 1 year ago |
Simon Sawicki | 8334ec961b | 1 year ago |
bashonly | 3801d36416 | 1 year ago |
dirkf | b383be9887 | 1 year ago |
dirkf | 46fde7caee | 1 year ago |
dirkf | 648dc5304c | 1 year ago |
dirkf | 1720c04dc5 | 1 year ago |
dirkf | d5ef405c5d | 1 year ago |
dirkf | f47fdb9564 | 1 year ago |
dirkf | b6dff4073d | 1 year ago |
dirkf | f24bc9272e | 1 year ago |
dirkf | b08a580906 | 1 year ago |
dirkf | 2500300c2a | 1 year ago |
dirkf | 58fc5bde47 | 1 year ago |
dirkf | fa7f0effbe | 1 year ago |
dirkf | ebdc82c586 | 1 year ago |
pukkandan | 9112e668a5 | 1 year ago |
dirkf | 07af47960f | 1 year ago |
dirkf | ae8ba2c319 | 1 year ago |
dirkf | d6433cbb2c | 1 year ago |
dirkf | ff75c300f5 | 1 year ago |
dirkf | a2534f7b88 | 1 year ago |
dirkf | b8a86dcf1a | 1 year ago |
dirkf | 2389c7cbd3 | 1 year ago |
dirkf | ee731f3d00 | 1 year ago |
dirkf | 1f7c6f8b2b | 1 year ago |
dirkf | d89c2137ba | 1 year ago |
dirkf | d1c6c5c4d6 | 1 year ago |
dirkf | 6ed3433828 | 1 year ago |
dirkf | a85a875fef | 1 year ago |
dirkf | 11cc3f3ad0 | 1 year ago |
dirkf | 64d6dd64c8 | 1 year ago |
dirkf | 211cbfd5d4 | 1 year ago |
dirkf | 26035bde46 | 1 year ago |
dirkf | 2da3fa04a6 | 1 year ago |
Gabriel Nagy | 735e87adfc | 1 year ago |
dirkf | fe7e13066c | 1 year ago |
dirkf | 213d1d91bf | 2 years ago |
dirkf | f8253a5289 | 2 years ago |
dirkf | d6ae3b77cd | 2 years ago |
dirkf | 9f4d83ff42 | 2 years ago |
dirkf | 25124bd640 | 2 years ago |
dirkf | 78da22489b | 2 years ago |
dirkf | 557dbac173 | 2 years ago |
dirkf | cdf40b6aa6 | 2 years ago |
pukkandan | 3f6d2bd76f | 2 years ago |
pukkandan | 88f28f620b | 2 years ago |
dirkf | f35b757c82 | 2 years ago |
dirkf | 45495228b7 | 2 years ago |
dirkf | 6fece0a96b | 2 years ago |
dirkf | 70ff013910 | 2 years ago |
dirkf | e8de54bce5 | 2 years ago |
dirkf | baa6c5e95c | 2 years ago |
dirkf | 5c985d4f81 | 2 years ago |
dirkf | 8c86fd33dc | 2 years ago |
Sophira | 27d41d7365 | 2 years ago |
dirkf | 0402710227 | 2 years ago |
pukkandan | 3e92c60fcd | 2 years ago |
pukkandan | 3da17834a4 | 2 years ago |
dirkf | f7ce98a21e | 2 years ago |
dirkf | e67e52a8f8 | 2 years ago |
pukkandan | 1d3751c3fe | 2 years ago |
df | 6067451e43 | 2 years ago |
dirkf | 57802e632f | 2 years ago |
dirkf | 2dd6c6edd8 | 2 years ago |
dirkf | dd9aa74bee | 2 years ago |
dirkf | 42b098dd79 | 2 years ago |
fonkap | 6f8c2635a5 | 2 years ago |
fonkap | de48105dd8 | 2 years ago |
fonkap | 822f19f05d | 2 years ago |
teddy171 | 33db85c571 | 2 years ago |
Valentin Metz | f33923cba7 | 2 years ago |
dirkf | e8198c517b | 2 years ago |
dirkf | bafb6dec72 | 2 years ago |
dirkf | 4e04f10499 | 2 years ago |
dirkf | 90c9f789d9 | 2 years ago |
dirkf | 249f2b6316 | 2 years ago |
dirkf | d6b14ba316 | 2 years ago |
dirkf | 30e986b834 | 2 years ago |
dirkf | 58988c1421 | 2 years ago |
dirkf | e19ec52322 | 2 years ago |
dirkf | f2f90887ca | 2 years ago |
dirkf | cd987e6fca | 2 years ago |
dirkf | d947ffe8e3 | 2 years ago |
dirkf | 384f632e8a | 2 years ago |
dirkf | 9d17948b5a | 2 years ago |
afterdelight | f316f5d4e3 | 2 years ago |
dirkf | bc6f94e459 | 2 years ago |
Epsilonator | be3392a0d4 | 2 years ago |
zhangeric-15 | 6d829d8119 | 2 years ago |
Ruowang Sun | 98b0cf1cd0 | 2 years ago |
Leon Etienne | e9611a2a36 | 2 years ago |
JChris246 | 807e593a32 | 2 years ago |
Rodrigo Dias | 297fbff23b | 2 years ago |
Brian Marks | 37cbdfa0e7 | 2 years ago |
dirkf | 295736c9cb | 2 years ago |
pukkandan | 14ef89a8da | 2 years ago |
dirkf | 195f22f679 | 2 years ago |
dirkf | fc2beab0e7 | 2 years ago |
FraFraFra-LongD | 1a4fbe8462 | 2 years ago |
dirkf | c2f9be3e63 | 2 years ago |
dirkf | 604762a9f8 | 2 years ago |
Moises Lima | 47e70fff8b | 2 years ago |
dirkf | de39d1281c | 2 years ago |
Andrei Lebedev | 27ed77aabb | 2 years ago |
dirkf | c4b19a8816 | 2 years ago |
dirkf | 087ddc2371 | 2 years ago |
dirkf | 65ccb0dd4e | 2 years ago |
dirkf | a874871801 | 2 years ago |
dirkf | b7c25959f0 | 2 years ago |
dirkf | f102e3dc4e | 2 years ago |
dirkf | a19855f0f5 | 2 years ago |
Xie Yanbo | ce5d36486e | 2 years ago |
Xie Yanbo | d25cf62086 | 2 years ago |
dirkf | 502cefa41f | 2 years ago |
dirkf | 0faa45d6c0 | 2 years ago |
ache | 447edc48e6 | 2 years ago |
dirkf | ee8560d01e | 2 years ago |
dirkf | 7135277fec | 2 years ago |
dirkf | 7bbd5b13d4 | 2 years ago |
Xie Yanbo | c91cbf6072 | 2 years ago |
dirkf | 11b284c81f | 2 years ago |
dirkf | c94a459a24 | 2 years ago |
dirkf | 6e2626f092 | 2 years ago |
dirkf | c282e5f8d7 | 2 years ago |
dirkf | 2ced5a7912 | 2 years ago |
Xiyue | 82e4eca711 | 2 years ago |
dirkf | 1b1442887e | 2 years ago |
dirkf | 22127b271c | 2 years ago |
coletdjnz | d35557a75d | 2 years ago |
dirkf | 9493ffdb8b | 2 years ago |
pukkandan | 7009bb9f31 | 2 years ago |
dirkf | 218c423bc0 | 2 years ago |
dirkf | 55c823634d | 2 years ago |
dirkf | 4050e10a4c | 2 years ago |
dirkf | ed5c44e7b7 | 2 years ago |
dirkf | 0f6422590e | 2 years ago |
dirkf | 4c6fba3765 | 2 years ago |
dirkf | d619dd712f | 2 years ago |
dirkf | 573b13410e | 2 years ago |
dirkf | 66e58dccc2 | 2 years ago |
dirkf | 556862bc91 | 2 years ago |
gudata | a8d5316aaf | 2 years ago |
dirkf | fd3f3bebd0 | 2 years ago |
dirkf | 46b8ae2f52 | 2 years ago |
dirkf | 538ec65ba7 | 2 years ago |
dirkf | b0a60ce203 | 2 years ago |
dirkf | e52e8b8111 | 2 years ago |
dirkf | d231b56717 | 2 years ago |
dirkf | e6a836d54c | 2 years ago |
dirkf | deee741fb1 | 2 years ago |
Wes | adb5294177 | 2 years ago |
Kyraminol Endyeran | 5f5c127ece | 2 years ago |
dirkf | 090acd58c1 | 2 years ago |
dirkf | a03b9775d5 | 2 years ago |
dirkf | 8a158a936c | 2 years ago |
dirkf | 11665dd236 | 2 years ago |
dirkf | cc179df346 | 2 years ago |
pukkandan | 0700fde640 | 2 years ago |
dirkf | 811c480f7b | 2 years ago |
dirkf | 3aa94d7945 | 2 years ago |
dirkf | ef044be34b | 2 years ago |
dirkf | 530f4582d0 | 2 years ago |
pukkandan | 1baa0f5f66 | 2 years ago |
LewdyCoder | 9aa8e5340f | 2 years ago |
dirkf | 04fd3289d3 | 2 years ago |
dirkf | 52c3751df7 | 2 years ago |
dirkf | 187a48aee2 | 2 years ago |
Jacob Chapman | be35e5343a | 2 years ago |
dirkf | c3deca86ae | 2 years ago |
dirkf | c7965b9fc2 | 2 years ago |
dirkf | e988fa4523 | 2 years ago |
dirkf | e27d8d819f | 2 years ago |
Árni Dagur | ebc627847c | 2 years ago |
dirkf | a0068bd6be | 2 years ago |
dirkf | b764dbe773 | 3 years ago |
nixxo | 871645a4a4 | 3 years ago |
nixxo | 1f50a07771 | 3 years ago |
nixxo | 9e5ca66f16 | 3 years ago |
lihan7 | 17d295a1ec | 3 years ago |
dirkf | 49c5293014 | 3 years ago |
df | 6508688e88 | 3 years ago |
dirkf | 4194d253c0 | 3 years ago |
dirkf | f8e543c906 | 3 years ago |
dirkf | c4d1738316 | 3 years ago |
dirkf | 1f13ccfd7f | 3 years ago |
marieell | 923292ba64 | 3 years ago |
Lesmiscore (Naoya Ozaki) | 782bfd26db | 3 years ago |
Vladimir Stavrinov | 3472227074 | 3 years ago |
Petr Vaněk | bf23bc0489 | 3 years ago |
Petr Vaněk | 85bf26c1d0 | 3 years ago |
Petr Vaněk | d8adca1b66 | 3 years ago |
Petr Vaněk | d02064218b | 3 years ago |
Petr Vaněk | b1297308fb | 3 years ago |
Petr Vaněk | 8088ce036a | 3 years ago |
Petr Vaněk | 29f7bfc4d7 | 3 years ago |
dirkf | 74f8cc48af | 3 years ago |
kikuyan | 8ff961d10f | 3 years ago |
dirkf | 266b6ef185 | 3 years ago |
dirkf | 825d3426c5 | 3 years ago |
dirkf | 47b0c8697a | 3 years ago |
Seonghyeon Cho | 734dfbb4e3 | 3 years ago |
df | ddc080a562 | 3 years ago |
Abdullah Ibn Fulan | 16a3fe2ba6 | 3 years ago |
Abdullah Ibn Fulan | c820a284a2 | 3 years ago |
dirkf | 58babe9af7 | 3 years ago |
df | 6d4932f023 | 3 years ago |
dirkf | 92d73ef393 | 3 years ago |
dirkf | 91278f4b6b | 3 years ago |
dirkf | 73e1ab6125 | 3 years ago |
dirkf | 584715a803 | 3 years ago |
dirkf | e00b0eab1e | 3 years ago |
dirkf | 005339d637 | 3 years ago |
Chris Rose | 23ad6402a6 | 3 years ago |
dirkf | 9642344965 | 3 years ago |
dirkf | 568c7005d5 | 3 years ago |
dirkf | 5cb4833f40 | 3 years ago |
dirkf | 5197336de6 | 3 years ago |
dirkf | 01824d275b | 3 years ago |
dirkf | 39a98b09a2 | 3 years ago |
dirkf | f0a05a55c2 | 3 years ago |
dirkf | 4186e81777 | 3 years ago |
dirkf | b494824286 | 3 years ago |
dirkf | 8248133e5e | 3 years ago |
dirkf | 27dbf6f0ab | 3 years ago |
dirkf | 61d791726f | 3 years ago |
pukkandan | 0c0876f790 | 3 years ago |
dirkf | 7a497f1405 | 3 years ago |
dirkf | 5add3f4373 | 3 years ago |
pukkandan | 78ce962f4f | 3 years ago |
dirkf | 41f0043983 | 3 years ago |
dirkf | 34c06b16f5 | 3 years ago |
dirkf | 1e677567cd | 3 years ago |
df | af9e72507e | 3 years ago |
dirkf | 6ca7b77696 | 3 years ago |
dirkf | 9d142109f4 | 3 years ago |
dirkf | 1ca673bd98 | 3 years ago |
df | e1eae16b56 | 3 years ago |
df | 96f87aaa3b | 3 years ago |
df | 5f5de51a49 | 3 years ago |
df | 39ca35e765 | 3 years ago |
df | d76d59d99d | 3 years ago |
df | 2c2c2bd348 | 3 years ago |
df | 46e0a729b2 | 3 years ago |
df | 57044eaceb | 3 years ago |
pukkandan | a3373da70c | 3 years ago |
pukkandan | 2c4cb134a9 | 3 years ago |
pukkandan | bfe72723d8 | 3 years ago |
pukkandan | ed99d68bdd | 3 years ago |
Sergey M․ | 5014bd67c2 | 3 years ago |
Sergey M․ | e418823350 | 3 years ago |
lanegramling | b5242da7d2 | 3 years ago |
bopol | a803582717 | 3 years ago |
Remita Amine | 7fb9564420 | 3 years ago |
Aleri Kaisattera | 379f52a495 | 3 years ago |
Sergey M․ | cb668eb973 | 3 years ago |
Sergey M․ | 751c9ae39a | 3 years ago |
Sergey M․ | da32828208 | 3 years ago |
Sergey M․ | 2ccee8db74 | 3 years ago |
Sergey M․ | 47f2f2fbe9 | 3 years ago |
Sergey M․ | 03ab02730f | 3 years ago |
Tianyi Shi | 4c77a2e538 | 3 years ago |
bopol | 4131703001 | 3 years ago |
Logan B | cc21aebe90 | 3 years ago |
Sergey M․ | 57b9a4b4c6 | 3 years ago |
kikuyan | 3a7ef27cf3 | 3 years ago |
kikuyan | a7f61feab2 | 3 years ago |
kikuyan | 8fe5d54eb7 | 3 years ago |
kikuyan | d156bc8d59 | 3 years ago |
Sergey M | c2350cac24 | 3 years ago |
Sergey M․ | b224cf39d5 | 3 years ago |
Sergey M․ | 5f85eb820c | 3 years ago |
Sergey M․ | bb7ac1ed66 | 3 years ago |
Sergey M․ | fdf91c52a8 | 3 years ago |
Sergey M․ | 943070af4a | 3 years ago |
Remita Amine | 82f3993ba3 | 3 years ago |
Sergey M․ | d495292852 | 3 years ago |
Sergey M․ | 2ee6c7f110 | 3 years ago |
Sergey M․ | 6511b8e8d7 | 3 years ago |
Sergey M․ | f3cd1d9cec | 3 years ago |
phlip | e13a01061d | 3 years ago |
Sergey M․ | 24297a42ef | 3 years ago |
Remita Amine | 1980ff4550 | 3 years ago |
Remita Amine | dfbbe2902f | 3 years ago |
Remita Amine | e1a9d0ef78 | 3 years ago |
Sergey M․ | f47627a1c9 | 3 years ago |
Sergey M․ | efeb9e0fbf | 3 years ago |
Sergey M․ | e90a890f01 | 3 years ago |
Sergey M․ | 199c645bee | 3 years ago |
Sergey M․ | 503a3744ad | 3 years ago |
kr4ssi | ef03721f47 | 3 years ago |
Sergey M․ | 1e8aaa1d15 | 3 years ago |
Sergey M․ | 6423d7054e | 3 years ago |
Sergey M․ | eb5080286a | 3 years ago |
Sergey M․ | 286e01ce30 | 3 years ago |
Sergey M․ | 8536dcafd8 | 3 years ago |
Sergey M․ | 552b139911 | 3 years ago |
Lukas Anzinger | 2202cef0e4 | 3 years ago |
Sergey M․ | a726009987 | 3 years ago |
catboy | 03afef7538 | 3 years ago |
Jacob Chapman | b797c1cc75 | 3 years ago |
Sergey M․ | 04be55307a | 3 years ago |
Sergey M․ | 504e4d804d | 3 years ago |
Sergey M․ | 1786cd3fe4 | 3 years ago |
Ben Rog-Wilhelm | b8645c1f58 | 3 years ago |
Ben Rog-Wilhelm | fe05191b8c | 3 years ago |
Sergey M․ | 0204838163 | 3 years ago |
Sergey M․ | a0df8a0617 | 3 years ago |
Sergey M․ | d1b9a5e2ef | 3 years ago |
Sergey M․ | ff04d43c46 | 3 years ago |
Sergey M․ | d2f72c40db | 3 years ago |
Sergey M․ | e33dfb445c | 3 years ago |
Sergey M․ | 94520568b3 | 3 years ago |
Sergey M․ | 273964d190 | 3 years ago |
Sergey M․ | 346dd3b5e8 | 3 years ago |
schnusch | f5c2c06231 | 3 years ago |
Sergey M․ | 57eaaff5cf | 3 years ago |
Sergey M․ | 999329cf6b | 3 years ago |
catboy | c6ab792990 | 3 years ago |
The Hatsune Daishi | 0db79d8181 | 3 years ago |
Sergey M․ | 7e8b3f9439 | 3 years ago |
Sergey M․ | ac19c3ac80 | 3 years ago |
Sergey M․ | c4a451bcdd | 3 years ago |
Sergey M․ | 5ad69d3d0e | 3 years ago |
Sergey M․ | 32290307a4 | 3 years ago |
Sergey M․ | dab83a2597 | 3 years ago |
dirkf | 41920fc80e | 3 years ago |
Sergey M․ | 9f6c03a006 | 4 years ago |
Sergey M․ | 596b26606c | 4 years ago |
Sergey M․ | f20b505b46 | 4 years ago |
Sergey M․ | cfee2dfe83 | 4 years ago |
Sergey M․ | 30a3a4c70f | 4 years ago |
Sergey M․ | a00a7e0cad | 4 years ago |
Sergey M․ | 54558e0baa | 4 years ago |
Sergey M․ | 7c52395479 | 4 years ago |
zraktvor | ea87ed8394 | 4 years ago |
Cássio Ávila | d01e261a15 | 4 years ago |
quyleanh | 79e4ccfc4b | 4 years ago |
Sergey M․ | 06159135ef | 4 years ago |
Aaron Lipinski | 4fb25ff5a3 | 4 years ago |
Sergey M․ | 1b0a13f33c | 4 years ago |
Remita Amine | 27e5a4464d | 4 years ago |
Sergey M․ | 545d6cb9d0 | 4 years ago |
Remita Amine | 006eea564d | 4 years ago |
Remita Amine | 281b8e3443 | 4 years ago |
Remita Amine | c0c5134c57 | 4 years ago |
Sergey M․ | 72a2c0a9ed | 4 years ago |
Sergey M․ | 445db582a2 | 4 years ago |
Sergey M․ | 6b116f0c03 | 4 years ago |
Sergey M․ | 70d0d4f9be | 4 years ago |
Sergey M․ | 6b315d96bc | 4 years ago |
guredora | 25b1287323 | 4 years ago |
Remita Amine | 760c911299 | 4 years ago |
Remita Amine | 162bf9e10a | 4 years ago |
Remita Amine | 6beb1ac65b | 4 years ago |
Remita Amine | 3ae9c0f410 | 4 years ago |
Remita Amine | e165f5641f | 4 years ago |
RomanEmelyanov | aee6feb02a | 4 years ago |
Remita Amine | 654b4f4ff2 | 4 years ago |
Remita Amine | 1df2596f81 | 4 years ago |
Remita Amine | 04d4a3b136 | 4 years ago |
Allan Daemon | 392c467f95 | 4 years ago |
Vid | c5aa8f36bf | 4 years ago |
Remita Amine | 3748863070 | 4 years ago |
Sergey M․ | ca304beb15 | 4 years ago |
Sergey M․ | e789bb1aa4 | 4 years ago |
Sergey M․ | 14f29f087e | 4 years ago |
Remita Amine | b97fb2edac | 4 years ago |
Remita Amine | 28bab774a0 | 4 years ago |
Sergey M․ | 8f493de9fb | 4 years ago |
Sergey M․ | 207bc35d34 | 4 years ago |
Remita Amine | 955894e72f | 4 years ago |
Sergey M․ | 287e50b56b | 4 years ago |
Chris Hranj | da762c4e32 | 4 years ago |
Remita Amine | 87a8bde777 | 4 years ago |
Remita Amine | 49fc0a567f | 4 years ago |
Remita Amine | cc777dcaa0 | 4 years ago |
Remita Amine | c785911870 | 4 years ago |
Remita Amine | 605e7b5e47 | 4 years ago |
Remita Amine | 8562218350 | 4 years ago |
Sergey M․ | 76da1c954a | 4 years ago |
Sergey M․ | c2fbfb49da | 4 years ago |
Roman Sebastian Karwacik | d1069d33b4 | 4 years ago |
The Hatsune Daishi | eafcadea26 | 4 years ago |
Remita Amine | a40002444e | 4 years ago |
Sergey M․ | 5208ae92fc | 4 years ago |
Remita Amine | 8117d613ac | 4 years ago |
Martin Ström | 00b4d72d1e | 4 years ago |
Remita Amine | 21ccd0d7f4 | 4 years ago |
Sergey M․ | 7e79ba7dd6 | 4 years ago |
Remita Amine | fa6bf0a711 | 4 years ago |
Remita Amine | f912d6c8cf | 4 years ago |
Sergey M․ | 357bfe251d | 4 years ago |
Remita Amine | 3be098010f | 4 years ago |
Remita Amine | 9955bb4a27 | 4 years ago |
Sergey M․ | ebfd66c4b1 | 4 years ago |
Sergey M․ | b509d24b2f | 4 years ago |
Sergey M․ | 1860d0f41c | 4 years ago |
Remita Amine | 60845121ca | 4 years ago |
Remita Amine | 1182f9567b | 4 years ago |
Remita Amine | ef414343e5 | 4 years ago |
Remita Amine | 43d986acd8 | 4 years ago |
Remita Amine | 9c644a6419 | 4 years ago |
Remita Amine | fc2c6d5323 | 4 years ago |
Remita Amine | 64ed3af328 | 4 years ago |
Sergey M․ | bae7dbf78b | 4 years ago |
Sergey M․ | 15c24b0346 | 4 years ago |
Sergey M․ | 477bff6906 | 4 years ago |
Sergey M․ | 1a1ccd9a6e | 4 years ago |
Sergey M․ | 7dc513487f | 4 years ago |
Remita Amine | c6a14755bb | 4 years ago |
Remita Amine | 7f064d50db | 4 years ago |
Remita Amine | b8b622fbeb | 4 years ago |
Remita Amine | ec64ec9651 | 4 years ago |
Sergey M․ | f68692b004 | 4 years ago |
Sergey M․ | 8c9766f4bf | 4 years ago |
Sergey M․ | 061c030133 | 4 years ago |
Remita Amine | 8f56907afa | 4 years ago |
Remita Amine | e1adb3ed4f | 4 years ago |
dirkf | e465b25c1f | 4 years ago |
Sergey M․ | 7c06216abf | 4 years ago |
Sergey M․ | 0002888627 | 4 years ago |
Sergey M․ | 3fb14cd214 | 4 years ago |
Remita Amine | bee6182680 | 4 years ago |
Remita Amine | 38fe5e239a | 4 years ago |
Remita Amine | 678d46f6bb | 4 years ago |
Alexander Seiler | 3c58f9e0b9 | 4 years ago |
Remita Amine | ef28e33249 | 4 years ago |
nixxo | 9662e4964b | 4 years ago |
Remita Amine | 44603290e5 | 4 years ago |
Remita Amine | 1631fca1ee | 4 years ago |
Remita Amine | 295860ff00 | 4 years ago |
Sergey M․ | 8cb4b71909 | 4 years ago |
Remita Amine | d81421af4b | 4 years ago |
nixxo | 7422a2194f | 4 years ago |
Remita Amine | 2090dbdc8c | 4 years ago |
Sergey M․ | 0a04e03a02 | 4 years ago |
Sergey M․ | 44b2d5f5fc | 4 years ago |
Sergey M․ | aa9118a373 | 4 years ago |
Adrian Heine | 36abc16c3c | 4 years ago |
Sergey M․ | 919d764600 | 4 years ago |
piplongrun | 696183e133 | 4 years ago |
SirCipherz | f90d825a6b | 4 years ago |
Remita Amine | 3037ab00c7 | 4 years ago |
Isaac-the-Man | 21e872b19a | 4 years ago |
Remita Amine | cf2dbec630 | 4 years ago |
Remita Amine | b92bb0e02a | 4 years ago |
Remita Amine | 40edffae3d | 4 years ago |
Sergey M․ | 9fc5eafb8e | 4 years ago |
bopol | 08c2fbb844 | 4 years ago |
Remita Amine | 3997efb65e | 4 years ago |
Remita Amine | a7356dffe9 | 4 years ago |
dmsummers | e20ec43094 | 4 years ago |
PrinceOfPuppers | 70baa7bfae | 4 years ago |
PrinceOfPuppers | 8980f53b42 | 4 years ago |
Sergey M․ | a363fb5d28 | 4 years ago |
Max | 646052e416 | 4 years ago |
Stephen Stair | 844e4cbc54 | 4 years ago |
Remita Amine | 56c63c8c02 | 4 years ago |
Sergey M․ | 07eb8f1916 | 4 years ago |
Remita Amine | 4b5410c5c8 | 4 years ago |
Remita Amine | be2e9b76ee | 4 years ago |
Remita Amine | d8085580f6 | 4 years ago |
Remita Amine | 6d32c6c6d3 | 4 years ago |
Sergey M․ | f94d764993 | 4 years ago |
Kevin Velghe | f28f1b4d6e | 4 years ago |
Sergey M․ | 360d5f0daa | 4 years ago |
Sergey M․ | cd493c5adc | 4 years ago |
Sergey M․ | a4c7ed6b1e | 4 years ago |
Remita Amine | 7f8b8bc418 | 4 years ago |
Sergey M․ | 311ebdd9a5 | 4 years ago |
Remita Amine | 99c68db0a8 | 4 years ago |
Sergey M․ | 5fc53690cb | 4 years ago |
Sergey M․ | 7a9161578e | 4 years ago |
Adrian Heine né Lang | 2405854705 | 4 years ago |
Sergey M․ | 0cf09c2b41 | 4 years ago |
Sergey M․ | 0156ce95c5 | 4 years ago |
Remita Amine | 1641b13232 | 4 years ago |
Sergey M․ | a4bdc3112b | 4 years ago |
Sergey M․ | c7d407bca2 | 4 years ago |
Sergey M․ | 7215691ab7 | 4 years ago |
Adrian Heine né Lang | fc88e8f0e3 | 4 years ago |
Sergey M․ | cfefb7d854 | 4 years ago |
Sergey M․ | 3c07d007ca | 4 years ago |
Sergey M․ | 89c5a7d5aa | 4 years ago |
Sergey M․ | 2adc0c51cd | 4 years ago |
Sergey M․ | 1f0910bc27 | 4 years ago |
Sergey M․ | e22ff4e356 | 4 years ago |
Sergey M․ | 83031d749b | 4 years ago |
Remita Amine | 1b731ebcaa | 4 years ago |
Remita Amine | ab25f3f431 | 4 years ago |
Guillem Vela | 07f7aad81c | 4 years ago |
Remita Amine | 1e2575df87 | 4 years ago |
Remita Amine | b111a64135 | 4 years ago |
Viren Rajput | 0e3a968479 | 4 years ago |
Remita Amine | c11f7cf9bd | 4 years ago |
Remita Amine | 8fa7cc387d | 4 years ago |
Remita Amine | 65eee5a745 | 4 years ago |
Remita Amine | efef4ddf51 | 4 years ago |
Remita Amine | 159a3d48df | 4 years ago |
Remita Amine | b46483a6ec | 4 years ago |
Remita Amine | 9c724601ba | 4 years ago |
Remita Amine | 67299f23d8 | 4 years ago |
Adrian Heine né Lang | 8bf9591a70 | 4 years ago |
nixxo | a800838f5a | 4 years ago |
Remita Amine | ba15b2fee6 | 4 years ago |
ping | 56a7ee9033 | 4 years ago |
Remita Amine | 0b4f03a563 | 4 years ago |
knapior | 7b8fa658f8 | 4 years ago |
Adrian Heine né Lang | fd95fc33b1 | 4 years ago |
Remita Amine | c669554ef5 | 4 years ago |
Remita Amine | 11b68df7a4 | 4 years ago |
Adrian Heine né Lang | d18f4419a7 | 4 years ago |
Remita Amine | 0f7d413d5b | 4 years ago |
Remita Amine | 286e5d6724 | 4 years ago |
tpikonen | 395981288b | 4 years ago |
Remita Amine | 55bb3556c8 | 4 years ago |
Remita Amine | 57f2488bbe | 4 years ago |
Adrian Heine né Lang | ea399a53eb | 4 years ago |
Sergey M․ | 811a183eb6 | 4 years ago |
Sergey M․ | b63981e850 | 4 years ago |
Sergey M․ | 186cbaffb9 | 4 years ago |
Sergey M․ | dbf3fa8af6 | 4 years ago |
Sergey M․ | f08c31cf33 | 4 years ago |
Aurélien Grosdidier | d8dab85419 | 4 years ago |
Sergey M․ | 5519bba3e1 | 4 years ago |
Sergey M․ | 142c584063 | 4 years ago |
aarubui | 4542e3e555 | 4 years ago |
Remita Amine | fa8f6d8580 | 4 years ago |
Remita Amine | 3bb7769c40 | 4 years ago |
Remita Amine | 8d286bd5b6 | 4 years ago |
Remita Amine | cff72b4cc0 | 4 years ago |
Brian Marks | 657221c81d | 4 years ago |
Remita Amine | 62acf5fa2c | 4 years ago |
Remita Amine | b79977fb6b | 4 years ago |
Remita Amine | bc7c8f3d4e | 4 years ago |
Remita Amine | 015e19b350 | 4 years ago |
Remita Amine | 54856480d7 | 4 years ago |
DrWursterich | 1dd12708c2 | 4 years ago |
Brian Marks | f9201cef58 | 4 years ago |
Remita Amine | 26499ba823 | 4 years ago |
Remita Amine | 58f6c2112d | 4 years ago |
Tatsh | de026a6acd | 4 years ago |
Adrian Heine né Lang | d4564afc70 | 4 years ago |
Adrian Heine né Lang | 360a5e0f60 | 4 years ago |
Remita Amine | 55a3ca16d3 | 4 years ago |
Sergey M․ | ef50cb3fda | 4 years ago |
Sergey M․ | 8673f4344c | 4 years ago |
Sergey M․ | f1487d4fca | 4 years ago |
Sergey M․ | 0cd4c402f0 | 4 years ago |
Sergey M․ | 9c9b458145 | 4 years ago |
Remita Amine | 9d50f86232 | 4 years ago |
Sergey M․ | 7e92f9015e | 4 years ago |
Sergey M․ | aa860b8016 | 4 years ago |
Sergey M․ | b484097b01 | 4 years ago |
Sergey M․ | ab9001dab5 | 4 years ago |
main() | 879866a230 | 4 years ago |
Aaron Zeng | 8e5477d036 | 4 years ago |
Sergey M․ | 1e8e5d5238 | 4 years ago |
Sergey M․ | d81a213cfb | 4 years ago |
Aarni Koskela | 7c2d18a13f | 4 years ago |
Remita Amine | 2408e6d26a | 4 years ago |
Remita Amine | cf862771d7 | 4 years ago |
Adrian Heine né Lang | a938f111ed | 4 years ago |
Remita Amine | 4759543f6e | 4 years ago |
Sergey M․ | d0fc289f45 | 4 years ago |
Sergey M․ | 70f572585d | 4 years ago |
Sergey M․ | c2d06aef60 | 4 years ago |
Remita Amine | ff1e765400 | 4 years ago |
0l-l0 | 170e1c1995 | 4 years ago |
Remita Amine | 61e669acff | 4 years ago |
Remita Amine | 2c337f4e85 | 4 years ago |
Sergey M․ | bf6a74c620 | 4 years ago |
Sergey M․ | 38a967c98e | 4 years ago |
nixxo | 3a61e6d360 | 4 years ago |
Remita Amine | 3d8e32dcc0 | 4 years ago |
Remita Amine | 8f29b2dd38 | 4 years ago |
Remita Amine | a29e340efa | 4 years ago |
Remita Amine | b13f29098f | 4 years ago |
Remita Amine | 430c4bc9d0 | 4 years ago |
Sergey M․ | 4ae243fc6c | 4 years ago |
Sergey M․ | 8f20ad36dc | 4 years ago |
Sergey M․ | 799c794947 | 4 years ago |
Remita Amine | 1ae7ae0b96 | 4 years ago |
Sergey M․ | ccc7112291 | 4 years ago |
23rd | 5b24f8f505 | 4 years ago |
nixxo | fcd90d2583 | 4 years ago |
Remita Amine | 8f757c7353 | 4 years ago |
Remita Amine | be1a3f2d11 | 4 years ago |
Sergey M․ | ecae54a98d | 4 years ago |
cladmi | f318882955 | 4 years ago |
Sergey M․ | c3399cac19 | 4 years ago |
Sergey M․ | 9237aaa77f | 4 years ago |
Kevin O'Connor | 766fcdd0fa | 4 years ago |
Yurii H | f6ea29e24b | 4 years ago |
Remita Amine | 8a3797a4ab | 4 years ago |
Remita Amine | 745db8899d | 4 years ago |
Remita Amine | 83db801cbf | 4 years ago |
Remita Amine | 964a8eb754 | 4 years ago |
Remita Amine | ac61f2e058 | 4 years ago |
Remita Amine | 8487e8b98a | 4 years ago |
Remita Amine | 9c484c0019 | 4 years ago |
Remita Amine | 0e96b4b5ce | 4 years ago |
Remita Amine | a563c97c5c | 4 years ago |
Remita Amine | e88c9ef62a | 4 years ago |
Sergey M․ | 0889eb33e0 | 4 years ago |
Sergey M․ | 0021a2b9a1 | 4 years ago |
Remita Amine | 19ec468635 | 4 years ago |
Remita Amine | 491ee7efe4 | 4 years ago |
Remita Amine | 8522bcd97c | 4 years ago |
Remita Amine | ac71fd5919 | 4 years ago |
Sergey M․ | 8e953dcbb1 | 4 years ago |
Sergey M․ | f4afb9a6a8 | 4 years ago |
Sergey M․ | d5b8cf093c | 4 years ago |
Sergey M․ | 5c6e84c0ff | 4 years ago |
Sergey M․ | 1aaee908b9 | 4 years ago |
Sergey M․ | b2d9fd9c9f | 4 years ago |
Sergey M․ | bc2f83b95e | 4 years ago |
Sergey M․ | 85de33b04e | 4 years ago |
Sergey M․ | 7dfd966848 | 4 years ago |
Sergey M․ | a25d03d7cb | 4 years ago |
Sergey M․ | cabfd4b1f0 | 4 years ago |
Sergey M․ | 7b643d4cd0 | 4 years ago |
Sergey M․ | 1f1d01d498 | 4 years ago |
Sergey M․ | 21a42e2588 | 4 years ago |
Sergey M․ | 2df93a0c4a | 4 years ago |
Remita Amine | 75972e200d | 4 years ago |
Remita Amine | d0d838638c | 4 years ago |
Remita Amine | 8c17afc471 | 4 years ago |
Remita Amine | 40d66e07df | 4 years ago |
Remita Amine | ab89a8678b | 4 years ago |
Remita Amine | 4d7d056909 | 4 years ago |
Remita Amine | c35bc82606 | 4 years ago |
Remita Amine | 2f56caf083 | 4 years ago |
Sergey M․ | 4066945919 | 4 years ago |
Sergey M․ | 2a84694b1e | 4 years ago |
Sergey M․ | 4046ffe1e1 | 4 years ago |
ozburo | d1d0612160 | 4 years ago |
Remita Amine | 7b0f04ed1f | 4 years ago |
nixxo | 2e21b06ea2 | 4 years ago |
Remita Amine | a6f75e6e89 | 4 years ago |
Remita Amine | bd18824c2a | 4 years ago |
Remita Amine | bdd044e67b | 4 years ago |
Remita Amine | f7e95fb2a0 | 4 years ago |
Remita Amine | 9dd674e1d2 | 4 years ago |
Remita Amine | 9c1e164e0c | 4 years ago |
Remita Amine | c706fbe9fe | 4 years ago |
Remita Amine | ebdcf70b0d | 4 years ago |
Remita Amine | 5966095e65 | 4 years ago |
Remita Amine | 9ee984fc76 | 4 years ago |
Remita Amine | 53528e1d23 | 4 years ago |
Remita Amine | c931c4b8dd | 4 years ago |
Remita Amine | 7acd042bbb | 4 years ago |
Remita Amine | bcfe485e01 | 4 years ago |
Sergey M․ | 479cc6d5a1 | 4 years ago |
Sergey M․ | 38286ee729 | 4 years ago |
Sergey M․ | 1a95953867 | 4 years ago |
Sergey M․ | 71febd1c52 | 4 years ago |
Sergey M․ | f1bc56c99b | 4 years ago |
Remita Amine | 64e419bd73 | 4 years ago |
Remita Amine | 782ea947b4 | 4 years ago |
Remita Amine | f27224d57b | 4 years ago |
Remita Amine | c007188598 | 4 years ago |
Remita Amine | af93ecfd88 | 4 years ago |
JamKage | 794771a164 | 4 years ago |
Sergey M․ | 6f2eaaf73d | 4 years ago |
Remita Amine | 4c7a4dbc4d | 4 years ago |
Remita Amine | f86b299d0e | 4 years ago |
Sergey M | e474996541 | 4 years ago |
Remita Amine | aed617e311 | 4 years ago |
Remita Amine | 0fa67c1d68 | 4 years ago |
Sergey M․ | 365b3cc72d | 4 years ago |
Sergey M․ | a272fe21a8 | 4 years ago |
Sergey M․ | cec1c2f211 | 4 years ago |
Sergey M․ | 12053450dc | 4 years ago |
Sergey M․ | 46cffb0c47 | 4 years ago |
Remita Amine | c32a059f52 | 4 years ago |
Sergey M․ | 6911312e53 | 4 years ago |
Sergey M․ | f22b5a6b96 | 4 years ago |
Andrew Udvare | 58e55198c1 | 4 years ago |
Sergey M․ | d61ed9f2f1 | 4 years ago |
Sergey M․ | 8bc4c6350e | 4 years ago |
Sergey M․ | cfa4ffa23b | 4 years ago |
Sergey M․ | 4f1dc1463d | 4 years ago |
Sergey M․ | 17e0f41d34 | 4 years ago |
JChris246 | b57b27ff8f | 4 years ago |
Marco Fantauzzo | bbe8cc6662 | 4 years ago |
Sergey M․ | 98106accb6 | 4 years ago |
Sergey M․ | af1312bfc3 | 4 years ago |
Remita Amine | 4c7d7215cd | 4 years ago |
Remita Amine | 0370d9eb3d | 4 years ago |
Remita Amine | 1434651d20 | 4 years ago |
Remita Amine | 2c312ab84a | 4 years ago |
Remita Amine | 0ee78d62d5 | 4 years ago |
Remita Amine | 7f3c90ab25 | 4 years ago |
Remita Amine | 1d3cd29730 | 4 years ago |
Remita Amine | 4ef1fc9707 | 4 years ago |
Remita Amine | f9e6aa1dcf | 4 years ago |
Remita Amine | f83db9064b | 4 years ago |
Remita Amine | 2da9a86399 | 4 years ago |
Remita Amine | ecaa535cf4 | 4 years ago |
Remita Amine | 79dd92b1fe | 4 years ago |
Remita Amine | bd3844c9c2 | 4 years ago |
Sergey M․ | 7bf5e3a84a | 4 years ago |
Sergey M․ | 08a17dae5b | 4 years ago |
Sergey M․ | 924ea66ade | 4 years ago |
Remita Amine | 5b72f5b74f | 4 years ago |
Remita Amine | bfa345744d | 4 years ago |
Remita Amine | f966461476 | 4 years ago |
Remita Amine | b8aea53682 | 4 years ago |
Remita Amine | c0d9eb7043 | 4 years ago |
Remita Amine | 3ba6aabd25 | 4 years ago |
Sergey M․ | a8b31505ed | 4 years ago |
Remita Amine | 90a271e914 | 4 years ago |
Remita Amine | 172d270607 | 4 years ago |
Remita Amine | 22feed08a1 | 4 years ago |
Sergey M․ | 942b8ca3be | 4 years ago |
Sergey M․ | 3729c52f9d | 4 years ago |
renalid | 71679eaee8 | 4 years ago |
Trevor Nelson | 76fe4ba3b2 | 4 years ago |
Remita Amine | 164a4a5756 | 4 years ago |
Remita Amine | 455951985b | 4 years ago |
Remita Amine | c29500e412 | 4 years ago |
Remita Amine | 1bc1520adc | 4 years ago |
Remita Amine | 022e05dc1f | 4 years ago |
Remita Amine | b34c9551aa | 4 years ago |
Remita Amine | 84f19c026f | 4 years ago |
Remita Amine | 6bde5492b6 | 4 years ago |
Remita Amine | 6086df4d6a | 4 years ago |
Sergey M․ | c98052c5da | 4 years ago |
Sergey M․ | ab62bc5838 | 4 years ago |
Sergey M․ | bc87ba8424 | 4 years ago |
Remita Amine | b79df1b68d | 4 years ago |
Matthew Rayermann | 2797c7be45 | 4 years ago |
Sergey M․ | 755f186e21 | 4 years ago |
Sergey M․ | 2240a1dc4d | 4 years ago |
Sergey M․ | 03d3af9768 | 4 years ago |
Sergey M․ | 5ce9527e16 | 4 years ago |
Sergey M․ | c527f5ada0 | 4 years ago |
Sergey M․ | ace52668f0 | 4 years ago |
Sergey M․ | 9c33eb027e | 4 years ago |
Sergey M․ | 679b711395 | 4 years ago |
Sergey M․ | 1727541315 | 4 years ago |
Sergey M․ | 45b0a0d11b | 4 years ago |
spvkgn | e665fcd4da | 4 years ago |
Remita Amine | aae737d4af | 4 years ago |
Remita Amine | 92a6de861e | 4 years ago |
Remita Amine | 5ff881aee6 | 4 years ago |
Sergey M․ | eae19a4473 | 4 years ago |
Sergey M․ | f70c263ce5 | 4 years ago |
Remita Amine | 92d135921f | 4 years ago |
Sergey M․ | d8008dee4f | 4 years ago |
Remita Amine | bb38a12157 | 4 years ago |
toniz4 | bcc8ef0a5a | 4 years ago |
Sergey M․ | 3cb1a5dc73 | 4 years ago |
Sergey M․ | ed40c02c9b | 4 years ago |
Sergey M․ | b5fdceb4d1 | 4 years ago |
Sergey M․ | 1e6f7f3b45 | 4 years ago |
Sergey M․ | 469ff02f4e | 4 years ago |
Sergey M․ | 56f1c5ad38 | 4 years ago |
Remita Amine | 3a6b8f4edf | 4 years ago |
Sergey M․ | 3ae02d4a62 | 4 years ago |
Remita Amine | 59fea63dc2 | 4 years ago |
Sergey M․ | 22933e02d4 | 4 years ago |
Remita Amine | 98a62d7cbd | 4 years ago |
Remita Amine | d6c5fdef34 | 4 years ago |
compujo | 4b9051cf39 | 4 years ago |
Remita Amine | 00f5068908 | 4 years ago |
Remita Amine | 432c6b0f48 | 4 years ago |
Remita Amine | ad5e5788ff | 4 years ago |
Remita Amine | dc65041c22 | 4 years ago |
Remita Amine | 9f88b07945 | 4 years ago |
Remita Amine | 225646c4ca | 4 years ago |
Remita Amine | 5c6e9f0b6c | 4 years ago |
Remita Amine | 6c370bc149 | 4 years ago |
Remita Amine | 37fd242342 | 4 years ago |
Remita Amine | aee1f87168 | 4 years ago |
Andrey Smirnoff | b69bb1ed11 | 4 years ago |
Sergey M․ | 772cefef8c | 4 years ago |
谭九鼎 | 842654b6d0 | 4 years ago |
Remita Amine | df5e50954b | 4 years ago |
Sergey M․ | a4a2fa8754 | 4 years ago |
Sergey M․ | 9da0504a09 | 4 years ago |
Sergey M․ | 470cf496f5 | 4 years ago |
Sergey M․ | e029da9add | 4 years ago |
Sergey M․ | e00b8f60d4 | 4 years ago |
EntranceJew | 644c3ef886 | 4 years ago |
Soneé John | 9d8d0f8b4a | 4 years ago |
Sergey M․ | 5a1fbbf8b7 | 4 years ago |
Remita Amine | e2bdf8bf4f | 4 years ago |
Remita Amine | c368dc98e0 | 4 years ago |
Remita Amine | e7eff914cd | 4 years ago |
Remita Amine | 07333d0062 | 4 years ago |
Sergey M․ | 5bd7ad2e81 | 4 years ago |
Remita Amine | 3ded751985 | 4 years ago |
Remita Amine | 6956db3606 | 4 years ago |
Remita Amine | 17b01228f8 | 4 years ago |
Sergey M․ | 4f1ecca58d | 4 years ago |
Sergey M․ | 2717036489 | 4 years ago |
Sergey M․ | d9482c0083 | 4 years ago |
Sergey M․ | 791b743765 | 4 years ago |
Remita Amine | fa604d9083 | 4 years ago |
Sergey M․ | 2bf0634d16 | 4 years ago |
Sergey M․ | dccf4932e1 | 4 years ago |
Sergey M․ | 91dd25fe1e | 4 years ago |
Sergey M․ | 06bf2ac20f | 4 years ago |
Sergey M․ | 6ad0d8781e | 4 years ago |
Sergey M․ | f2c704e112 | 4 years ago |
Sergey M․ | 5e822c2526 | 4 years ago |
renalid | cc017e07ca | 4 years ago |
Remita Amine | 082da36416 | 4 years ago |
Remita Amine | 6bf95b15ee | 4 years ago |
Remita Amine | 4c93b2fd15 | 4 years ago |
Remita Amine | 1b26bfd425 | 4 years ago |
Sergey M․ | 13ec444a98 | 4 years ago |
Sergey M․ | 51579d87e4 | 4 years ago |
Sergey M․ | e147619669 | 4 years ago |
Sergey M․ | e7f93fbd85 | 4 years ago |
Sergey M․ | 58f7ada235 | 4 years ago |
Sergey M․ | c67b33888f | 4 years ago |
Sergey M․ | e8c0af04b7 | 4 years ago |
Sergey M․ | 5d769860c3 | 4 years ago |
Sergey M․ | 02b04785ee | 4 years ago |
Sergey M․ | 41c92b8d02 | 4 years ago |
Remita Amine | fe0c28f956 | 4 years ago |
Remita Amine | 957c65b9ea | 4 years ago |
Remita Amine | 5e95e18ce9 | 4 years ago |
Sergey M․ | e91df0c550 | 4 years ago |
Sergey M․ | c5636e9bca | 4 years ago |
Remita Amine | 2e47264235 | 4 years ago |
Sergey M․ | 1c78cb118c | 4 years ago |
Matthew Rayermann | beab2f88c9 | 4 years ago |
Remita Amine | 0025447369 | 4 years ago |
Remita Amine | da4304609d | 4 years ago |
Remita Amine | ea89680aea | 4 years ago |
Remita Amine | 664dd8ba85 | 4 years ago |
Remita Amine | 64554c12e1 | 4 years ago |
opusforlife2 | 4ded9c0f00 | 4 years ago |
Remita Amine | c0820dd52a | 4 years ago |
Sergey M․ | 2bb70750a9 | 4 years ago |
Sergey M․ | 09d923f2eb | 4 years ago |
Remita Amine | 37d979ad33 | 4 years ago |
Remita Amine | 95ac4de229 | 4 years ago |
Remita Amine | d3e142b3fa | 4 years ago |
Sergey M․ | 132aece1ed | 4 years ago |
Sergey M․ | 3e4e338133 | 4 years ago |
Roman Beránek | be19ae11fd | 4 years ago |
Sergey M․ | 59d63d8d4a | 4 years ago |
Remita Amine | cfeba5d17f | 4 years ago |
Sergey M․ | 6da0e5e7a2 | 4 years ago |
Sergey M․ | d6ce649f15 | 4 years ago |
Sergey M․ | b449b73dcc | 4 years ago |
Sergey M․ | 16c822e91e | 4 years ago |
Michael Munch | 4318170779 | 4 years ago |
Sergey M․ | fb626c0586 | 4 years ago |
bopol | 717d1d2d5a | 4 years ago |
Sergey M․ | 9585b376db | 4 years ago |
JChris246 | f04cfe24e0 | 4 years ago |
Sergey M․ | 20c50c6556 | 4 years ago |
Remita Amine | f9f9699f2f | 4 years ago |
Adrian Heine né Lang | a3cf22e590 | 4 years ago |
Remita Amine | 99de2f38d3 | 4 years ago |
Sergey M․ | 9fe50837c3 | 4 years ago |
Sergey M․ | 4dc545553f | 4 years ago |
Sergey M․ | 686e898fde | 4 years ago |
Remita Amine | 3a78198a96 | 4 years ago |
Remita Amine | 836c810716 | 4 years ago |
Remita Amine | 97c5be383c | 4 years ago |
Remita Amine | a7ea88537a | 4 years ago |
Remita Amine | 6c35de4c6b | 4 years ago |
Remita Amine | 579d43951d | 4 years ago |
Joshua Lochner | e24ebeafd1 | 4 years ago |
Remita Amine | 01c92973dd | 4 years ago |
Sergey M․ | f4415faa46 | 4 years ago |
Sergey M․ | a86ce9d7a1 | 4 years ago |
Sergey M․ | 37258c644f | 4 years ago |
Sergey M․ | d0512ac4c5 | 4 years ago |
Sergey M․ | da4eaa15a2 | 4 years ago |
Sergey M․ | 191286265d | 4 years ago |
Sergey M․ | 3234272818 | 4 years ago |
Sergey M․ | 9d2c90354f | 4 years ago |
Sergey M․ | 316b10855a | 4 years ago |
Remita Amine | 484fe78737 | 4 years ago |
Remita Amine | 2cd43a00d1 | 4 years ago |
renalid | dd0f524c69 | 4 years ago |
Remita Amine | c84f9475b8 | 4 years ago |
Jia Rong Yee | 15f2734791 | 4 years ago |
Remita Amine | cb6e24f946 | 4 years ago |
Remita Amine | 9d531aa291 | 4 years ago |
Remita Amine | e9cbb98a0f | 4 years ago |
Remita Amine | 193422e12a | 4 years ago |
Sergey M․ | c4cabf040e | 4 years ago |
Sergey M․ | f23eceebbf | 4 years ago |
Sergey M․ | 650bd8f623 | 4 years ago |
Sergey M․ | 5d8cb4367d | 4 years ago |
Sergey M․ | 82abc13aed | 4 years ago |
Sergey M․ | 1e72660c9b | 4 years ago |
Sergey M․ | 049f224248 | 4 years ago |
Mattias Wadman | 0ada1b90b8 | 4 years ago |
Josh Soref | 71ddc222ad | 4 years ago |
Daniel Peukert | 21292c0649 | 4 years ago |
Sergey M․ | 46a265a2da | 4 years ago |
Sergey M․ | e2096776b9 | 4 years ago |
Remita Amine | a1c88c4819 | 4 years ago |
renalid | 51ca93d751 | 4 years ago |
Sergey M․ | a7e0531999 | 4 years ago |
Sergey M․ | ab0eda99e1 | 4 years ago |
Sergey M․ | ec99f47108 | 4 years ago |
Sergey M․ | b31b5f4434 | 4 years ago |
Remita Amine | 86f2fa1590 | 4 years ago |
Remita Amine | 7bc7fbce23 | 4 years ago |
Remita Amine | a78e530c14 | 4 years ago |
Remita Amine | af7bb684c0 | 4 years ago |
Leonardo Taccari | dd9e0f58f3 | 4 years ago |
Remita Amine | 59e583f7e8 | 4 years ago |
beefchop | daa25d4142 | 4 years ago |
Remita Amine | 25a35cb38a | 4 years ago |
Remita Amine | 2cf8003638 | 4 years ago |
Joost Verdoorn | cf1a8668e8 | 4 years ago |
Remita Amine | 2dbb45ae82 | 4 years ago |
Remita Amine | 4fe190df70 | 4 years ago |
Sergey M․ | 039e715b30 | 4 years ago |
Sergey M․ | 32152bab7a | 4 years ago |
Sergey M․ | b1347a5881 | 4 years ago |
Sergey M․ | 91e954587f | 4 years ago |
Sergey M․ | 9b505185da | 4 years ago |
Remita Amine | 444a68e0ec | 4 years ago |
Sergey M․ | c7178f0f7a | 4 years ago |
Remita Amine | 5c3f7014ef | 4 years ago |
Remita Amine | d65628ef03 | 4 years ago |
gdzx | 8a6c5b0806 | 4 years ago |
Sergey M․ | 9360936f26 | 4 years ago |
Sergey M․ | bb2b89e077 | 4 years ago |
Sergey M․ | 2d7a29081c | 4 years ago |
Sergey M․ | 2864179293 | 4 years ago |
Remita Amine | 284f8306df | 4 years ago |
Remita Amine | aa613ef7e1 | 4 years ago |
Remita Amine | cb2b9a22a5 | 4 years ago |
Remita Amine | 5b867c15a8 | 4 years ago |
Sergey M․ | f8c749f12c | 4 years ago |
Sergey M․ | 7d509c613b | 4 years ago |
Sergey M․ | b92e95aa01 | 4 years ago |
Sergey M․ | e3cad6bd99 | 4 years ago |
Sergey M․ | 6699b6ce41 | 4 years ago |
Remita Amine | 91dcde8a38 | 4 years ago |
Sergey M․ | 11f3471c4b | 4 years ago |
Sergey M․ | f4093b34f6 | 4 years ago |
Sergey M․ | a80b23c373 | 4 years ago |
Sergey M․ | 2e7fa18bb9 | 4 years ago |
Sergey M․ | fe07e788bf | 4 years ago |
Remita Amine | 6d3bdcf217 | 4 years ago |
Remita Amine | ec2a2ab441 | 4 years ago |
Remita Amine | 9448a20312 | 4 years ago |
Remita Amine | 3f1748b944 | 4 years ago |
Remita Amine | fe13087cd1 | 4 years ago |
Remita Amine | f22fa82d7f | 4 years ago |
Remita Amine | 059fa9aa81 | 4 years ago |
Remita Amine | 650aec4a98 | 4 years ago |
Remita Amine | 2ea9c97432 | 4 years ago |
Edward Betts | efc589b865 | 4 years ago |
Remita Amine | 1737ea69b9 | 4 years ago |
Sergey M․ | 1fb034d029 | 4 years ago |
@ -0,0 +1 @@
|
||||
blank_issues_enabled: false
|
@ -0,0 +1,479 @@
|
||||
name: CI
|
||||
|
||||
env:
|
||||
all-cpython-versions: 2.6, 2.7, 3.2, 3.3, 3.4, 3.5, 3.6, 3.7, 3.8, 3.9, 3.10, 3.11, 3.12
|
||||
main-cpython-versions: 2.7, 3.2, 3.5, 3.9, 3.11
|
||||
pypy-versions: pypy-2.7, pypy-3.6, pypy-3.7
|
||||
cpython-versions: main
|
||||
test-set: core
|
||||
# Python beta version to be built using pyenv before setup-python support
|
||||
# Must also be included in all-cpython-versions
|
||||
next: 3.13
|
||||
|
||||
on:
|
||||
push:
|
||||
# push inputs aren't known to GitHub
|
||||
inputs:
|
||||
cpython-versions:
|
||||
type: string
|
||||
default: all
|
||||
test-set:
|
||||
type: string
|
||||
default: core
|
||||
pull_request:
|
||||
# pull_request inputs aren't known to GitHub
|
||||
inputs:
|
||||
cpython-versions:
|
||||
type: string
|
||||
default: main
|
||||
test-set:
|
||||
type: string
|
||||
default: both
|
||||
workflow_dispatch:
|
||||
inputs:
|
||||
cpython-versions:
|
||||
type: choice
|
||||
description: CPython versions (main = 2.7, 3.2, 3.5, 3.9, 3.11)
|
||||
options:
|
||||
- all
|
||||
- main
|
||||
required: true
|
||||
default: main
|
||||
test-set:
|
||||
type: choice
|
||||
description: core, download
|
||||
options:
|
||||
- both
|
||||
- core
|
||||
- download
|
||||
required: true
|
||||
default: both
|
||||
|
||||
permissions:
|
||||
contents: read
|
||||
|
||||
jobs:
|
||||
select:
|
||||
name: Select tests from inputs
|
||||
runs-on: ubuntu-latest
|
||||
outputs:
|
||||
cpython-versions: ${{ steps.run.outputs.cpython-versions }}
|
||||
test-set: ${{ steps.run.outputs.test-set }}
|
||||
own-pip-versions: ${{ steps.run.outputs.own-pip-versions }}
|
||||
steps:
|
||||
# push and pull_request inputs aren't known to GitHub (pt3)
|
||||
- name: Set push defaults
|
||||
if: ${{ github.event_name == 'push' }}
|
||||
env:
|
||||
cpython-versions: all
|
||||
test-set: core
|
||||
run: |
|
||||
echo "cpython-versions=${{env.cpython-versions}}" >> "$GITHUB_ENV"
|
||||
echo "test_set=${{env.test_set}}" >> "$GITHUB_ENV"
|
||||
- name: Get pull_request inputs
|
||||
if: ${{ github.event_name == 'pull_request' }}
|
||||
env:
|
||||
cpython-versions: main
|
||||
test-set: both
|
||||
run: |
|
||||
echo "cpython-versions=${{env.cpython-versions}}" >> "$GITHUB_ENV"
|
||||
echo "test_set=${{env.test_set}}" >> "$GITHUB_ENV"
|
||||
- name: Make version array
|
||||
id: run
|
||||
run: |
|
||||
# Make a JSON Array from comma/space-separated string (no extra escaping)
|
||||
json_list() { \
|
||||
ret=""; IFS="${IFS},"; set -- $*; \
|
||||
for a in "$@"; do \
|
||||
ret=$(printf '%s"%s"' "${ret}${ret:+, }" "$a"); \
|
||||
done; \
|
||||
printf '[%s]' "$ret"; }
|
||||
tests="${{ inputs.test-set || env.test-set }}"
|
||||
[ $tests = both ] && tests="core download"
|
||||
printf 'test-set=%s\n' "$(json_list $tests)" >> "$GITHUB_OUTPUT"
|
||||
versions="${{ inputs.cpython-versions || env.cpython-versions }}"
|
||||
if [ "$versions" = all ]; then \
|
||||
versions="${{ env.all-cpython-versions }}"; else \
|
||||
versions="${{ env.main-cpython-versions }}"; \
|
||||
fi
|
||||
printf 'cpython-versions=%s\n' \
|
||||
"$(json_list ${versions}${versions:+, }${{ env.pypy-versions }})" >> "$GITHUB_OUTPUT"
|
||||
# versions with a special get-pip.py in a per-version subdirectory
|
||||
printf 'own-pip-versions=%s\n' \
|
||||
"$(json_list 2.6, 2.7, 3.2, 3.3, 3.4, 3.5, 3.6)" >> "$GITHUB_OUTPUT"
|
||||
|
||||
tests:
|
||||
name: Run tests
|
||||
needs: select
|
||||
permissions:
|
||||
contents: read
|
||||
packages: write
|
||||
runs-on: ${{ matrix.os }}
|
||||
env:
|
||||
PIP: python -m pip
|
||||
PIP_DISABLE_PIP_VERSION_CHECK: true
|
||||
PIP_NO_PYTHON_VERSION_WARNING: true
|
||||
strategy:
|
||||
fail-fast: true
|
||||
matrix:
|
||||
os: [ubuntu-20.04]
|
||||
python-version: ${{ fromJSON(needs.select.outputs.cpython-versions) }}
|
||||
python-impl: [cpython]
|
||||
ytdl-test-set: ${{ fromJSON(needs.select.outputs.test-set) }}
|
||||
run-tests-ext: [sh]
|
||||
include:
|
||||
- os: windows-2019
|
||||
python-version: 3.4
|
||||
python-impl: cpython
|
||||
ytdl-test-set: ${{ contains(needs.select.outputs.test-set, 'core') && 'core' || 'nocore' }}
|
||||
run-tests-ext: bat
|
||||
- os: windows-2019
|
||||
python-version: 3.4
|
||||
python-impl: cpython
|
||||
ytdl-test-set: ${{ contains(needs.select.outputs.test-set, 'download') && 'download' || 'nodownload' }}
|
||||
run-tests-ext: bat
|
||||
# jython
|
||||
- os: ubuntu-20.04
|
||||
python-version: 2.7
|
||||
python-impl: jython
|
||||
ytdl-test-set: ${{ contains(needs.select.outputs.test-set, 'core') && 'core' || 'nocore' }}
|
||||
run-tests-ext: sh
|
||||
- os: ubuntu-20.04
|
||||
python-version: 2.7
|
||||
python-impl: jython
|
||||
ytdl-test-set: ${{ contains(needs.select.outputs.test-set, 'download') && 'download' || 'nodownload' }}
|
||||
run-tests-ext: sh
|
||||
steps:
|
||||
- name: Prepare Linux
|
||||
if: ${{ startswith(matrix.os, 'ubuntu') }}
|
||||
shell: bash
|
||||
run: |
|
||||
# apt in runner, if needed, may not be up-to-date
|
||||
sudo apt-get update
|
||||
- name: Checkout
|
||||
uses: actions/checkout@v3
|
||||
#-------- Python 3 -----
|
||||
- name: Set up supported Python ${{ matrix.python-version }}
|
||||
id: setup-python
|
||||
if: ${{ matrix.python-impl == 'cpython' && matrix.python-version != '2.6' && matrix.python-version != '2.7' && matrix.python-version != env.next }}
|
||||
# wrap broken actions/setup-python@v4
|
||||
# NB may run apt-get install in Linux
|
||||
uses: ytdl-org/setup-python@v1
|
||||
env:
|
||||
# Temporary workaround for Python 3.5 failures - May 2024
|
||||
PIP_TRUSTED_HOST: "pypi.python.org pypi.org files.pythonhosted.org"
|
||||
with:
|
||||
python-version: ${{ matrix.python-version }}
|
||||
cache-build: true
|
||||
allow-build: info
|
||||
- name: Locate supported Python ${{ matrix.python-version }}
|
||||
if: ${{ env.pythonLocation }}
|
||||
shell: bash
|
||||
run: |
|
||||
echo "PYTHONHOME=${pythonLocation}" >> "$GITHUB_ENV"
|
||||
export expected="${{ steps.setup-python.outputs.python-path }}"
|
||||
dirname() { printf '%s\n' \
|
||||
'import os, sys' \
|
||||
'print(os.path.dirname(sys.argv[1]))' \
|
||||
| ${expected} - "$1"; }
|
||||
expd="$(dirname "$expected")"
|
||||
export python="$(command -v python)"
|
||||
[ "$expd" = "$(dirname "$python")" ] || echo "PATH=$expd:${PATH}" >> "$GITHUB_ENV"
|
||||
[ -x "$python" ] || printf '%s\n' \
|
||||
'import os' \
|
||||
'exp = os.environ["expected"]' \
|
||||
'python = os.environ["python"]' \
|
||||
'exps = os.path.split(exp)' \
|
||||
'if python and (os.path.dirname(python) == exp[0]):' \
|
||||
' exit(0)' \
|
||||
'exps[1] = "python" + os.path.splitext(exps[1])[1]' \
|
||||
'python = os.path.join(*exps)' \
|
||||
'try:' \
|
||||
' os.symlink(exp, python)' \
|
||||
'except AttributeError:' \
|
||||
' os.rename(exp, python)' \
|
||||
| ${expected} -
|
||||
printf '%s\n' \
|
||||
'import sys' \
|
||||
'print(sys.path)' \
|
||||
| ${expected} -
|
||||
#-------- Python next (was 3.12) -
|
||||
- name: Set up CPython 3.next environment
|
||||
if: ${{ matrix.python-impl == 'cpython' && matrix.python-version == env.next }}
|
||||
shell: bash
|
||||
run: |
|
||||
PYENV_ROOT=$HOME/.local/share/pyenv
|
||||
echo "PYENV_ROOT=${PYENV_ROOT}" >> "$GITHUB_ENV"
|
||||
- name: Cache Python 3.next
|
||||
id: cachenext
|
||||
if: ${{ matrix.python-impl == 'cpython' && matrix.python-version == env.next }}
|
||||
uses: actions/cache@v3
|
||||
with:
|
||||
key: python-${{ env.next }}
|
||||
path: |
|
||||
${{ env.PYENV_ROOT }}
|
||||
- name: Build and set up Python 3.next
|
||||
if: ${{ matrix.python-impl == 'cpython' && matrix.python-version == env.next && ! steps.cachenext.outputs.cache-hit }}
|
||||
# dl and build locally
|
||||
shell: bash
|
||||
run: |
|
||||
# Install build environment
|
||||
sudo apt-get install -y build-essential llvm libssl-dev tk-dev \
|
||||
libncursesw5-dev libreadline-dev libsqlite3-dev \
|
||||
libffi-dev xz-utils zlib1g-dev libbz2-dev liblzma-dev
|
||||
# Download PyEnv from its GitHub repository.
|
||||
export PYENV_ROOT=${{ env.PYENV_ROOT }}
|
||||
export PATH=$PYENV_ROOT/bin:$PATH
|
||||
git clone "https://github.com/pyenv/pyenv.git" "$PYENV_ROOT"
|
||||
pyenv install ${{ env.next }}
|
||||
- name: Locate Python 3.next
|
||||
if: ${{ matrix.python-impl == 'cpython' && matrix.python-version == env.next }}
|
||||
shell: bash
|
||||
run: |
|
||||
PYTHONHOME="$(echo "${{ env.PYENV_ROOT }}/versions/${{ env.next }}."*)"
|
||||
test -n "$PYTHONHOME"
|
||||
echo "PYTHONHOME=$PYTHONHOME" >> "$GITHUB_ENV"
|
||||
echo "PATH=${PYTHONHOME}/bin:$PATH" >> "$GITHUB_ENV"
|
||||
#-------- Python 2.7 --
|
||||
- name: Set up Python 2.7
|
||||
if: ${{ matrix.python-impl == 'cpython' && matrix.python-version == '2.7' }}
|
||||
# install 2.7
|
||||
shell: bash
|
||||
run: |
|
||||
sudo apt-get install -y python2 python-is-python2
|
||||
echo "PYTHONHOME=/usr" >> "$GITHUB_ENV"
|
||||
#-------- Python 2.6 --
|
||||
- name: Set up Python 2.6 environment
|
||||
if: ${{ matrix.python-impl == 'cpython' && matrix.python-version == '2.6' }}
|
||||
shell: bash
|
||||
run: |
|
||||
openssl_name=openssl-1.0.2u
|
||||
echo "openssl_name=${openssl_name}" >> "$GITHUB_ENV"
|
||||
openssl_dir=$HOME/.local/opt/$openssl_name
|
||||
echo "openssl_dir=${openssl_dir}" >> "$GITHUB_ENV"
|
||||
PYENV_ROOT=$HOME/.local/share/pyenv
|
||||
echo "PYENV_ROOT=${PYENV_ROOT}" >> "$GITHUB_ENV"
|
||||
sudo apt-get install -y openssl ca-certificates
|
||||
- name: Cache Python 2.6
|
||||
id: cache26
|
||||
if: ${{ matrix.python-version == '2.6' }}
|
||||
uses: actions/cache@v3
|
||||
with:
|
||||
key: python-2.6.9
|
||||
path: |
|
||||
${{ env.openssl_dir }}
|
||||
${{ env.PYENV_ROOT }}
|
||||
- name: Build and set up Python 2.6
|
||||
if: ${{ matrix.python-impl == 'cpython' && matrix.python-version == '2.6' && ! steps.cache26.outputs.cache-hit }}
|
||||
# dl and build locally
|
||||
shell: bash
|
||||
run: |
|
||||
# Install build environment
|
||||
sudo apt-get install -y build-essential llvm libssl-dev tk-dev \
|
||||
libncursesw5-dev libreadline-dev libsqlite3-dev \
|
||||
libffi-dev xz-utils zlib1g-dev libbz2-dev liblzma-dev
|
||||
# Download and install OpenSSL 1.0.2, back in time
|
||||
openssl_name=${{ env.openssl_name }}
|
||||
openssl_targz=${openssl_name}.tar.gz
|
||||
openssl_dir=${{ env.openssl_dir }}
|
||||
openssl_inc=$openssl_dir/include
|
||||
openssl_lib=$openssl_dir/lib
|
||||
openssl_ssl=$openssl_dir/ssl
|
||||
curl -L "https://www.openssl.org/source/$openssl_targz" -o $openssl_targz
|
||||
tar -xf $openssl_targz
|
||||
( cd $openssl_name; \
|
||||
./config --prefix=$openssl_dir --openssldir=${openssl_dir}/ssl \
|
||||
--libdir=lib -Wl,-rpath=${openssl_dir}/lib shared zlib-dynamic && \
|
||||
make && \
|
||||
make install )
|
||||
rm -rf $openssl_name
|
||||
rmdir $openssl_ssl/certs && ln -s /etc/ssl/certs $openssl_ssl/certs
|
||||
# Download PyEnv from its GitHub repository.
|
||||
export PYENV_ROOT=${{ env.PYENV_ROOT }}
|
||||
export PATH=$PYENV_ROOT/bin:$PATH
|
||||
git clone "https://github.com/pyenv/pyenv.git" "$PYENV_ROOT"
|
||||
# Prevent pyenv build trying (and failing) to update pip
|
||||
export GET_PIP=get-pip-2.6.py
|
||||
echo 'import sys; sys.exit(0)' > ${GET_PIP}
|
||||
GET_PIP=$(realpath $GET_PIP)
|
||||
# Build and install Python
|
||||
export CFLAGS="-I$openssl_inc"
|
||||
export LDFLAGS="-L$openssl_lib"
|
||||
export LD_LIBRARY_PATH="$openssl_lib"
|
||||
pyenv install 2.6.9
|
||||
- name: Locate Python 2.6
|
||||
if: ${{ matrix.python-impl == 'cpython' && matrix.python-version == '2.6' }}
|
||||
shell: bash
|
||||
run: |
|
||||
PYTHONHOME="${{ env.PYENV_ROOT }}/versions/2.6.9"
|
||||
echo "PYTHONHOME=$PYTHONHOME" >> "$GITHUB_ENV"
|
||||
echo "PATH=${PYTHONHOME}/bin:$PATH" >> "$GITHUB_ENV"
|
||||
echo "LD_LIBRARY_PATH=${{ env.openssl_dir }}/lib${LD_LIBRARY_PATH:+:}${LD_LIBRARY_PATH}" >> "$GITHUB_ENV"
|
||||
#-------- Jython ------
|
||||
- name: Set up Java 8
|
||||
if: ${{ matrix.python-impl == 'jython' }}
|
||||
uses: actions/setup-java@v3
|
||||
with:
|
||||
java-version: 8
|
||||
distribution: 'zulu'
|
||||
- name: Setup Jython environment
|
||||
if: ${{ matrix.python-impl == 'jython' }}
|
||||
shell: bash
|
||||
run: |
|
||||
echo "JYTHON_ROOT=${HOME}/jython" >> "$GITHUB_ENV"
|
||||
echo "PIP=pip" >> "$GITHUB_ENV"
|
||||
- name: Cache Jython
|
||||
id: cachejy
|
||||
if: ${{ matrix.python-impl == 'jython' && matrix.python-version == '2.7' }}
|
||||
uses: actions/cache@v3
|
||||
with:
|
||||
# 2.7.3 now available, may solve SNI issue
|
||||
key: jython-2.7.1
|
||||
path: |
|
||||
${{ env.JYTHON_ROOT }}
|
||||
- name: Install Jython
|
||||
if: ${{ matrix.python-impl == 'jython' && matrix.python-version == '2.7' && ! steps.cachejy.outputs.cache-hit }}
|
||||
shell: bash
|
||||
run: |
|
||||
JYTHON_ROOT="${{ env.JYTHON_ROOT }}"
|
||||
curl -L "https://repo1.maven.org/maven2/org/python/jython-installer/2.7.1/jython-installer-2.7.1.jar" -o jython-installer.jar
|
||||
java -jar jython-installer.jar -s -d "${JYTHON_ROOT}"
|
||||
echo "${JYTHON_ROOT}/bin" >> "$GITHUB_PATH"
|
||||
- name: Set up cached Jython
|
||||
if: ${{ steps.cachejy.outputs.cache-hit }}
|
||||
shell: bash
|
||||
run: |
|
||||
JYTHON_ROOT="${{ env.JYTHON_ROOT }}"
|
||||
echo "${JYTHON_ROOT}/bin" >> $GITHUB_PATH
|
||||
- name: Install supporting Python 2.7 if possible
|
||||
if: ${{ steps.cachejy.outputs.cache-hit }}
|
||||
shell: bash
|
||||
run: |
|
||||
sudo apt-get install -y python2.7 || true
|
||||
#-------- pip ---------
|
||||
- name: Set up supported Python ${{ matrix.python-version }} pip
|
||||
if: ${{ (matrix.python-version != '3.2' && steps.setup-python.outputs.python-path) || matrix.python-version == '2.7' }}
|
||||
# This step may run in either Linux or Windows
|
||||
shell: bash
|
||||
run: |
|
||||
echo "$PATH"
|
||||
echo "$PYTHONHOME"
|
||||
# curl is available on both Windows and Linux, -L follows redirects, -O gets name
|
||||
python -m ensurepip || python -m pip --version || { \
|
||||
get_pip="${{ contains(needs.select.outputs.own-pip-versions, matrix.python-version) && format('{0}/', matrix.python-version) || '' }}"; \
|
||||
curl -L -O "https://bootstrap.pypa.io/pip/${get_pip}get-pip.py"; \
|
||||
python get-pip.py; }
|
||||
- name: Set up Python 2.6 pip
|
||||
if: ${{ matrix.python-version == '2.6' }}
|
||||
shell: bash
|
||||
run: |
|
||||
python -m pip --version || { \
|
||||
curl -L -O "https://bootstrap.pypa.io/pip/2.6/get-pip.py"; \
|
||||
curl -L -O "https://files.pythonhosted.org/packages/ac/95/a05b56bb975efa78d3557efa36acaf9cf5d2fd0ee0062060493687432e03/pip-9.0.3-py2.py3-none-any.whl"; \
|
||||
python get-pip.py --no-setuptools --no-wheel pip-9.0.3-py2.py3-none-any.whl; }
|
||||
# work-around to invoke pip module on 2.6: https://bugs.python.org/issue2751
|
||||
echo "PIP=python -m pip.__main__" >> "$GITHUB_ENV"
|
||||
- name: Set up other Python ${{ matrix.python-version }} pip
|
||||
if: ${{ matrix.python-version == '3.2' && steps.setup-python.outputs.python-path }}
|
||||
shell: bash
|
||||
run: |
|
||||
python -m pip --version || { \
|
||||
curl -L -O "https://bootstrap.pypa.io/pip/3.2/get-pip.py"; \
|
||||
curl -L -O "https://files.pythonhosted.org/packages/b2/d0/cd115fe345dd6f07ec1c780020a7dfe74966fceeb171e0f20d1d4905b0b7/pip-7.1.2-py2.py3-none-any.whl"; \
|
||||
python get-pip.py --no-setuptools --no-wheel pip-7.1.2-py2.py3-none-any.whl; }
|
||||
#-------- unittest ----
|
||||
- name: Upgrade Unittest for Python 2.6
|
||||
if: ${{ matrix.python-version == '2.6' }}
|
||||
shell: bash
|
||||
run: |
|
||||
# Work around deprecation of support for non-SNI clients at PyPI CDN (see https://status.python.org/incidents/hzmjhqsdjqgb)
|
||||
$PIP -qq show unittest2 || { \
|
||||
for u in "65/26/32b8464df2a97e6dd1b656ed26b2c194606c16fe163c695a992b36c11cdf/six-1.13.0-py2.py3-none-any.whl" \
|
||||
"f2/94/3af39d34be01a24a6e65433d19e107099374224905f1e0cc6bbe1fd22a2f/argparse-1.4.0-py2.py3-none-any.whl" \
|
||||
"c7/a3/c5da2a44c85bfbb6eebcfc1dde24933f8704441b98fdde6528f4831757a6/linecache2-1.0.0-py2.py3-none-any.whl" \
|
||||
"17/0a/6ac05a3723017a967193456a2efa0aa9ac4b51456891af1e2353bb9de21e/traceback2-1.4.0-py2.py3-none-any.whl" \
|
||||
"72/20/7f0f433060a962200b7272b8c12ba90ef5b903e218174301d0abfd523813/unittest2-1.1.0-py2.py3-none-any.whl"; do \
|
||||
curl -L -O "https://files.pythonhosted.org/packages/${u}"; \
|
||||
$PIP install ${u##*/}; \
|
||||
done; }
|
||||
# make tests use unittest2
|
||||
for test in ./test/test_*.py ./test/helper.py; do
|
||||
sed -r -i -e '/^import unittest$/s/test/test2 as unittest/' "$test"
|
||||
done
|
||||
#-------- nose --------
|
||||
- name: Install nose for Python ${{ matrix.python-version }}
|
||||
if: ${{ (matrix.python-version != '3.2' && steps.setup-python.outputs.python-path) || (matrix.python-impl == 'cpython' && (matrix.python-version == '2.7' || matrix.python-version == env.next)) }}
|
||||
shell: bash
|
||||
run: |
|
||||
echo "$PATH"
|
||||
echo "$PYTHONHOME"
|
||||
# Use PyNose for recent Pythons instead of Nose
|
||||
py3ver="${{ matrix.python-version }}"
|
||||
py3ver=${py3ver#3.}
|
||||
[ "$py3ver" != "${{ matrix.python-version }}" ] && py3ver=${py3ver%.*} || py3ver=0
|
||||
[ "$py3ver" -ge 9 ] && nose=pynose || nose=nose
|
||||
$PIP -qq show $nose || $PIP install $nose
|
||||
- name: Install nose for other Python 2
|
||||
if: ${{ matrix.python-impl == 'jython' || (matrix.python-impl == 'cpython' && matrix.python-version == '2.6') }}
|
||||
shell: bash
|
||||
run: |
|
||||
# Work around deprecation of support for non-SNI clients at PyPI CDN (see https://status.python.org/incidents/hzmjhqsdjqgb)
|
||||
$PIP -qq show nose || { \
|
||||
curl -L -O "https://files.pythonhosted.org/packages/99/4f/13fb671119e65c4dce97c60e67d3fd9e6f7f809f2b307e2611f4701205cb/nose-1.3.7-py2-none-any.whl"; \
|
||||
$PIP install nose-1.3.7-py2-none-any.whl; }
|
||||
- name: Install nose for other Python 3
|
||||
if: ${{ matrix.python-version == '3.2' && steps.setup-python.outputs.python-path }}
|
||||
shell: bash
|
||||
run: |
|
||||
$PIP -qq show nose || { \
|
||||
curl -L -O "https://files.pythonhosted.org/packages/15/d8/dd071918c040f50fa1cf80da16423af51ff8ce4a0f2399b7bf8de45ac3d9/nose-1.3.7-py3-none-any.whl"; \
|
||||
$PIP install nose-1.3.7-py3-none-any.whl; }
|
||||
- name: Set up nosetest test
|
||||
if: ${{ contains(needs.select.outputs.test-set, matrix.ytdl-test-set ) }}
|
||||
shell: bash
|
||||
run: |
|
||||
# set PYTHON_VER
|
||||
PYTHON_VER=${{ matrix.python-version }}
|
||||
[ "${PYTHON_VER#*-}" != "$PYTHON_VER" ] || PYTHON_VER="${{ matrix.python-impl }}-${PYTHON_VER}"
|
||||
echo "PYTHON_VER=$PYTHON_VER" >> "$GITHUB_ENV"
|
||||
echo "PYTHON_IMPL=${{ matrix.python-impl }}" >> "$GITHUB_ENV"
|
||||
# define a test to validate the Python version used by nosetests
|
||||
printf '%s\n' \
|
||||
'from __future__ import unicode_literals' \
|
||||
'import sys, os, platform' \
|
||||
'try:' \
|
||||
' import unittest2 as unittest' \
|
||||
'except ImportError:' \
|
||||
' import unittest' \
|
||||
'class TestPython(unittest.TestCase):' \
|
||||
' def setUp(self):' \
|
||||
' self.ver = os.environ["PYTHON_VER"].split("-")' \
|
||||
' def test_python_ver(self):' \
|
||||
' self.assertEqual(["%d" % v for v in sys.version_info[:2]], self.ver[-1].split(".")[:2])' \
|
||||
' self.assertTrue(sys.version.startswith(self.ver[-1]))' \
|
||||
' self.assertIn(self.ver[0], ",".join((sys.version, platform.python_implementation())).lower())' \
|
||||
' def test_python_impl(self):' \
|
||||
' self.assertIn(platform.python_implementation().lower(), (os.environ["PYTHON_IMPL"], self.ver[0]))' \
|
||||
> test/test_python.py
|
||||
#-------- TESTS -------
|
||||
- name: Run tests
|
||||
if: ${{ contains(needs.select.outputs.test-set, matrix.ytdl-test-set ) }}
|
||||
continue-on-error: ${{ matrix.ytdl-test-set == 'download' || matrix.python-impl == 'jython' }}
|
||||
env:
|
||||
YTDL_TEST_SET: ${{ matrix.ytdl-test-set }}
|
||||
run: |
|
||||
./devscripts/run_tests.${{ matrix.run-tests-ext }}
|
||||
flake8:
|
||||
name: Linter
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: actions/checkout@v3
|
||||
- name: Set up Python
|
||||
uses: actions/setup-python@v4
|
||||
with:
|
||||
python-version: 3.9
|
||||
- name: Install flake8
|
||||
run: pip install flake8
|
||||
- name: Run flake8
|
||||
run: flake8 .
|
||||
|
@ -1,50 +0,0 @@
|
||||
language: python
|
||||
python:
|
||||
- "2.6"
|
||||
- "2.7"
|
||||
- "3.2"
|
||||
- "3.3"
|
||||
- "3.4"
|
||||
- "3.5"
|
||||
- "3.6"
|
||||
- "pypy"
|
||||
- "pypy3"
|
||||
dist: trusty
|
||||
env:
|
||||
- YTDL_TEST_SET=core
|
||||
- YTDL_TEST_SET=download
|
||||
jobs:
|
||||
include:
|
||||
- python: 3.7
|
||||
dist: xenial
|
||||
env: YTDL_TEST_SET=core
|
||||
- python: 3.7
|
||||
dist: xenial
|
||||
env: YTDL_TEST_SET=download
|
||||
- python: 3.8
|
||||
dist: xenial
|
||||
env: YTDL_TEST_SET=core
|
||||
- python: 3.8
|
||||
dist: xenial
|
||||
env: YTDL_TEST_SET=download
|
||||
- python: 3.8-dev
|
||||
dist: xenial
|
||||
env: YTDL_TEST_SET=core
|
||||
- python: 3.8-dev
|
||||
dist: xenial
|
||||
env: YTDL_TEST_SET=download
|
||||
- env: JYTHON=true; YTDL_TEST_SET=core
|
||||
- env: JYTHON=true; YTDL_TEST_SET=download
|
||||
- name: flake8
|
||||
python: 3.8
|
||||
dist: xenial
|
||||
install: pip install flake8
|
||||
script: flake8 .
|
||||
fast_finish: true
|
||||
allow_failures:
|
||||
- env: YTDL_TEST_SET=download
|
||||
- env: JYTHON=true; YTDL_TEST_SET=core
|
||||
- env: JYTHON=true; YTDL_TEST_SET=download
|
||||
before_install:
|
||||
- if [ "$JYTHON" == "true" ]; then ./devscripts/install_jython.sh; export PATH="$HOME/jython/bin:$PATH"; fi
|
||||
script: ./devscripts/run_tests.sh
|
@ -0,0 +1 @@
|
||||
# Empty file needed to make devscripts.utils properly importable from outside
|
@ -0,0 +1,83 @@
|
||||
#!/usr/bin/env python
|
||||
# coding: utf-8
|
||||
|
||||
from __future__ import unicode_literals
|
||||
|
||||
"""
|
||||
This script displays the API parameters corresponding to a yt-dl command line
|
||||
|
||||
Example:
|
||||
$ ./cli_to_api.py -f best
|
||||
{u'format': 'best'}
|
||||
$
|
||||
"""
|
||||
|
||||
# Allow direct execution
|
||||
import os
|
||||
import sys
|
||||
sys.path.insert(0, os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
|
||||
|
||||
import youtube_dl
|
||||
from types import MethodType
|
||||
|
||||
|
||||
def cli_to_api(*opts):
    """Return the YoutubeDL parameter dict matching command-line args *opts*.

    Only parameters that differ from the defaults are returned; the
    'postprocessors' entry, if present, is reduced to the non-default ones.
    """
    YDL = youtube_dl.YoutubeDL

    # to extract the parsed options, break out of YoutubeDL instantiation

    # return options via this Exception
    class ParseYTDLResult(Exception):
        def __init__(self, result):
            super(ParseYTDLResult, self).__init__('result')
            self.opts = result

    # replacement constructor that raises ParseYTDLResult
    def ytdl_init(ydl, ydl_opts):
        super(YDL, ydl).__init__(ydl_opts)
        raise ParseYTDLResult(ydl_opts)

    # patch in the constructor
    YDL.__init__ = MethodType(ytdl_init, YDL)

    # core parser: run the CLI front-end and catch the smuggled options
    def parsed_options(argv):
        try:
            youtube_dl._real_main(list(argv))
        except ParseYTDLResult as result:
            return result.opts

    # from https://github.com/yt-dlp/yt-dlp/issues/5859#issuecomment-1363938900
    default = parsed_options([])

    def neq_opt(a, b):
        if a == b:
            return False
        # Special-case DateRange: its all-inclusive default stringifies as
        # below, so compare string forms when the default is None.
        # BUG FIX: was `repr(type(object))` - the *builtin* `object`, whose
        # type repr is always "<class 'type'>", so the branch never ran.
        if a is None and repr(type(b)).endswith(".utils.DateRange'>"):
            return '0001-01-01 - 9999-12-31' != '{0}'.format(b)
        return a != b

    diff = dict((k, v) for k, v in parsed_options(opts).items() if neq_opt(default[k], v))
    if 'postprocessors' in diff:
        diff['postprocessors'] = [pp for pp in diff['postprocessors'] if pp not in default['postprocessors']]
    return diff
|
||||
|
||||
|
||||
def main():
    """Pretty-print the API options matching the CLI args in sys.argv."""
    from pprint import PrettyPrinter

    printer = PrettyPrinter()
    default_format = printer.format

    # Render DateRange instances readably (their repr is truncated by
    # default pretty-printing) while delegating everything else.
    def render(obj, context, maxlevels, level):
        if repr(type(obj)).endswith(".utils.DateRange'>"):
            return '{0}: {1}>'.format(repr(obj)[:-2], obj), True, False
        return default_format(obj, context, maxlevels, level)

    printer.format = render

    printer.pprint(cli_to_api(*sys.argv))
|
||||
|
||||
|
||||
if __name__ == '__main__':
|
||||
main()
|
@ -1,5 +0,0 @@
|
||||
#!/bin/bash
|
||||
|
||||
wget http://central.maven.org/maven2/org/python/jython-installer/2.7.1/jython-installer-2.7.1.jar
|
||||
java -jar jython-installer-2.7.1.jar -s -d "$HOME/jython"
|
||||
$HOME/jython/bin/jython -m pip install nose
|
@ -0,0 +1,17 @@
|
||||
@echo off

rem Select and run the nose test set chosen by the YTDL_TEST_SET
rem environment variable ("core" or "download").

rem Keep this list in sync with the `offlinetest` target in Makefile
set DOWNLOAD_TESTS="age_restriction^|download^|iqiyi_sdk_interpreter^|socks^|subtitles^|write_annotations^|youtube_lists^|youtube_signature"

rem "core" excludes the download tests; "download" runs only them, in parallel
if "%YTDL_TEST_SET%" == "core" (
    set test_set="-I test_("%DOWNLOAD_TESTS%")\.py"
    set multiprocess_args=""
) else if "%YTDL_TEST_SET%" == "download" (
    set test_set="-I test_(?!"%DOWNLOAD_TESTS%").+\.py"
    set multiprocess_args="--processes=4 --process-timeout=540"
) else (
    echo YTDL_TEST_SET is not set or invalid
    exit /b 1
)

rem %var:"=% strips the protective quotes added above before expansion
nosetests test --verbose %test_set:"=% %multiprocess_args:"=%
|
@ -0,0 +1,62 @@
|
||||
# coding: utf-8
|
||||
from __future__ import unicode_literals
|
||||
|
||||
import argparse
|
||||
import functools
|
||||
import os.path
|
||||
import subprocess
|
||||
import sys
|
||||
|
||||
dirn = os.path.dirname
|
||||
|
||||
sys.path.insert(0, dirn(dirn(os.path.abspath(__file__))))
|
||||
|
||||
from youtube_dl.compat import (
|
||||
compat_kwargs,
|
||||
compat_open as open,
|
||||
)
|
||||
|
||||
|
||||
def read_file(fname):
    """Return the entire text of *fname*, decoded as UTF-8."""
    with open(fname, encoding='utf-8') as src:
        content = src.read()
    return content
|
||||
|
||||
|
||||
def write_file(fname, content, mode='w'):
    """Write *content* to *fname* as UTF-8; return the character count written.

    *mode* defaults to 'w' (truncate); pass 'a' to append.
    """
    with open(fname, mode, encoding='utf-8') as dest:
        written = dest.write(content)
    return written
|
||||
|
||||
|
||||
def read_version(fname='youtube_dl/version.py'):
    """Get the version without importing the package.

    Executes the version file in an isolated namespace and returns its
    `__version__` value.
    """
    # ROBUSTNESS: exec'ing into function locals and reading the result back
    # via locals() relies on a CPython implementation detail (the docs say
    # modifying locals() has undefined effect); an explicit namespace dict
    # is guaranteed to work on every implementation.
    namespace = {}
    exec(compile(read_file(fname), fname, 'exec'), namespace)
    return namespace['__version__']
|
||||
|
||||
|
||||
def get_filename_args(has_infile=False, default_outfile=None):
    """Parse filename arguments from the command line.

    Returns (infile, outfile) when *has_infile* is true, else just outfile.
    When *default_outfile* is given, the outfile argument becomes optional.
    """
    parser = argparse.ArgumentParser()
    if has_infile:
        parser.add_argument('infile', help='Input file')
    if default_outfile:
        outfile_kwargs = {'nargs': '?', 'default': default_outfile}
    else:
        outfile_kwargs = {}
    outfile_kwargs['help'] = 'Output file'
    parser.add_argument('outfile', **compat_kwargs(outfile_kwargs))

    opts = parser.parse_args()
    if has_infile:
        return opts.infile, opts.outfile
    return opts.outfile
|
||||
|
||||
|
||||
def compose_functions(*functions):
    """Return one function applying each of *functions* left-to-right.

    With no functions given, the result is the identity function.
    """
    def composed(value):
        for fn in functions:
            value = fn(value)
        return value
    return composed
|
||||
|
||||
|
||||
def run_process(*args, **kwargs):
    """Run *args* via subprocess.run, defaulting to checked, captured,
    UTF-8 text-mode execution; returns the CompletedProcess."""
    for option, value in (
            ('text', True),
            ('check', True),
            ('capture_output', True)):
        kwargs.setdefault(option, value)
    if kwargs['text']:
        # decode output leniently so unexpected bytes never raise
        kwargs.setdefault('encoding', 'utf-8')
        kwargs.setdefault('errors', 'replace')
    kwargs = compat_kwargs(kwargs)
    return subprocess.run(args, **kwargs)
|
@ -0,0 +1,272 @@
|
||||
#!/usr/bin/env python
|
||||
# coding: utf-8
|
||||
from __future__ import unicode_literals
|
||||
|
||||
# Allow direct execution
|
||||
import os
|
||||
import re
|
||||
import sys
|
||||
import subprocess
|
||||
import unittest
|
||||
sys.path.insert(0, os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
|
||||
|
||||
from test.helper import (
|
||||
FakeLogger,
|
||||
FakeYDL,
|
||||
http_server_port,
|
||||
try_rm,
|
||||
)
|
||||
from youtube_dl import YoutubeDL
|
||||
from youtube_dl.compat import (
|
||||
compat_contextlib_suppress,
|
||||
compat_http_cookiejar_Cookie,
|
||||
compat_http_server,
|
||||
compat_kwargs,
|
||||
)
|
||||
from youtube_dl.utils import (
|
||||
encodeFilename,
|
||||
join_nonempty,
|
||||
)
|
||||
from youtube_dl.downloader.external import (
|
||||
Aria2cFD,
|
||||
Aria2pFD,
|
||||
AxelFD,
|
||||
CurlFD,
|
||||
FFmpegFD,
|
||||
HttpieFD,
|
||||
WgetFD,
|
||||
)
|
||||
from youtube_dl.postprocessor import (
|
||||
FFmpegPostProcessor,
|
||||
)
|
||||
import threading
|
||||
|
||||
# size of the dummy payload served by the test HTTP server
TEST_SIZE = 10 * 1024

# keyword arguments for constructing a test cookie
# (matches the http.cookiejar.Cookie constructor signature)
TEST_COOKIE = {
    'version': 0,
    'name': 'test',
    'value': 'ytdlp',
    'port': None,
    'port_specified': False,
    'domain': '.example.com',
    'domain_specified': True,
    'domain_initial_dot': False,
    'path': '/',
    'path_specified': True,
    'secure': False,
    'expires': None,
    'discard': False,
    'comment': None,
    'comment_url': None,
    'rest': {},
}

# "name=value" form of the test cookie, as sent in a Cookie header
TEST_COOKIE_VALUE = join_nonempty('name', 'value', delim='=', from_dict=TEST_COOKIE)

# minimal info_dict handed to each downloader under test
TEST_INFO = {'url': 'http://www.example.com/'}
|
||||
|
||||
|
||||
def cookiejar_Cookie(**cookie_args):
    """Build a compat cookiejar Cookie from keyword arguments."""
    compat_args = compat_kwargs(cookie_args)
    return compat_http_cookiejar_Cookie(**compat_args)
|
||||
|
||||
|
||||
def ifExternalFDAvailable(externalFD):
    """Return a unittest skip decorator: tests run only when the
    downloader's helper program is installed on this machine."""
    return unittest.skipUnless(
        externalFD.available(),
        externalFD.get_basename() + ' not found')
|
||||
|
||||
|
||||
class HTTPTestRequestHandler(compat_http_server.BaseHTTPRequestHandler):
    """Serve TEST_SIZE bytes of dummy video data for the downloader tests.

    The request path selects whether Range handling and/or a
    Content-Length header are provided, so each server behaviour can be
    exercised separately.
    """

    def log_message(self, format, *args):
        # silence per-request logging during test runs
        pass

    def send_content_range(self, total=None):
        # Parse a "bytes=start-end" Range header; when both bounds are
        # present, emit the matching Content-Range header.
        # Returns the number of bytes to serve (range length, else *total*).
        range_header = self.headers.get('Range')
        start = end = None
        if range_header:
            mobj = re.match(r'bytes=(\d+)-(\d+)', range_header)
            if mobj:
                start, end = (int(mobj.group(i)) for i in (1, 2))
        valid_range = start is not None and end is not None
        if valid_range:
            content_range = 'bytes %d-%d' % (start, end)
            if total:
                # total size is only appended when known
                content_range += '/%d' % total
            self.send_header('Content-Range', content_range)
        return (end - start + 1) if valid_range else total

    def serve(self, range=True, content_length=True):
        # Send a 200 response filled with '#' bytes, optionally honouring
        # Range and optionally advertising Content-Length.
        self.send_response(200)
        self.send_header('Content-Type', 'video/mp4')
        size = TEST_SIZE
        if range:
            size = self.send_content_range(TEST_SIZE)
        if content_length:
            self.send_header('Content-Length', size)
        self.end_headers()
        self.wfile.write(b'#' * size)

    def do_GET(self):
        # dispatch on request path to the matching server behaviour
        if self.path == '/regular':
            self.serve()
        elif self.path == '/no-content-length':
            self.serve(content_length=False)
        elif self.path == '/no-range':
            self.serve(range=False)
        elif self.path == '/no-range-no-content-length':
            self.serve(range=False, content_length=False)
        else:
            assert False, 'unrecognised server path'
|
||||
|
||||
|
||||
@ifExternalFDAvailable(Aria2pFD)
class TestAria2pFD(unittest.TestCase):
    """End-to-end download tests driven through a real aria2c RPC server."""

    def setUp(self):
        # start the dummy HTTP server on an ephemeral local port
        self.httpd = compat_http_server.HTTPServer(
            ('127.0.0.1', 0), HTTPTestRequestHandler)
        self.port = http_server_port(self.httpd)
        self.server_thread = threading.Thread(target=self.httpd.serve_forever)
        self.server_thread.daemon = True
        self.server_thread.start()

    def download(self, params, ep):
        # Launch aria2c in RPC mode, download endpoint *ep* through it,
        # and verify the downloaded file has the expected size.
        with subprocess.Popen(
            ['aria2c', '--enable-rpc'],
            stdout=subprocess.DEVNULL,
            stderr=subprocess.DEVNULL
        ) as process:
            if not process.poll():
                # aria2c is still running, i.e. the RPC server came up
                filename = 'testfile.mp4'
                params['logger'] = FakeLogger()
                params['outtmpl'] = filename
                ydl = YoutubeDL(params)
                try_rm(encodeFilename(filename))
                self.assertEqual(ydl.download(['http://127.0.0.1:%d/%s' % (self.port, ep)]), 0)
                self.assertEqual(os.path.getsize(encodeFilename(filename)), TEST_SIZE)
                try_rm(encodeFilename(filename))
            process.kill()

    def download_all(self, params):
        # exercise every server behaviour exposed by the test handler
        for ep in ('regular', 'no-content-length', 'no-range', 'no-range-no-content-length'):
            self.download(params, ep)

    def test_regular(self):
        self.download_all({'external_downloader': 'aria2p'})

    def test_chunked(self):
        self.download_all({
            'external_downloader': 'aria2p',
            'http_chunk_size': 1000,
        })
|
||||
|
||||
|
||||
@ifExternalFDAvailable(HttpieFD)
class TestHttpieFD(unittest.TestCase):
    def test_make_cmd(self):
        """Check the generated httpie command line, without and with cookies."""
        with FakeYDL() as ydl:
            downloader = HttpieFD(ydl, {})
            self.assertEqual(
                downloader._make_cmd('test', TEST_INFO),
                ['http', '--download', '--output', 'test', 'http://www.example.com/'])

            # Test cookie header is added
            ydl.cookiejar.set_cookie(cookiejar_Cookie(**TEST_COOKIE))
            self.assertEqual(
                downloader._make_cmd('test', TEST_INFO),
                ['http', '--download', '--output', 'test',
                 'http://www.example.com/', 'Cookie:' + TEST_COOKIE_VALUE])
|
||||
|
||||
|
||||
@ifExternalFDAvailable(AxelFD)
class TestAxelFD(unittest.TestCase):
    def test_make_cmd(self):
        """Check the generated axel command line, without and with cookies."""
        with FakeYDL() as ydl:
            downloader = AxelFD(ydl, {})
            self.assertEqual(
                downloader._make_cmd('test', TEST_INFO),
                ['axel', '-o', 'test', '--', 'http://www.example.com/'])

            # Test cookie header is added
            ydl.cookiejar.set_cookie(cookiejar_Cookie(**TEST_COOKIE))
            self.assertEqual(
                downloader._make_cmd('test', TEST_INFO),
                ['axel', '-o', 'test', '-H', 'Cookie: ' + TEST_COOKIE_VALUE,
                 '--max-redirect=0', '--', 'http://www.example.com/'])
|
||||
|
||||
|
||||
@ifExternalFDAvailable(WgetFD)
class TestWgetFD(unittest.TestCase):
    def test_make_cmd(self):
        """Check --load-cookies only appears once the jar has a cookie."""
        with FakeYDL() as ydl:
            downloader = WgetFD(ydl, {})
            self.assertNotIn('--load-cookies', downloader._make_cmd('test', TEST_INFO))
            # Test cookiejar tempfile arg is added
            ydl.cookiejar.set_cookie(cookiejar_Cookie(**TEST_COOKIE))
            self.assertIn('--load-cookies', downloader._make_cmd('test', TEST_INFO))
|
||||
|
||||
|
||||
@ifExternalFDAvailable(CurlFD)
class TestCurlFD(unittest.TestCase):
    def test_make_cmd(self):
        """Check --cookie (with the cookie value) appears only when cookies exist."""
        with FakeYDL() as ydl:
            downloader = CurlFD(ydl, {})
            self.assertNotIn('--cookie', downloader._make_cmd('test', TEST_INFO))
            # Test cookie header is added
            ydl.cookiejar.set_cookie(cookiejar_Cookie(**TEST_COOKIE))
            self.assertIn('--cookie', downloader._make_cmd('test', TEST_INFO))
            self.assertIn(TEST_COOKIE_VALUE, downloader._make_cmd('test', TEST_INFO))
|
||||
|
||||
|
||||
@ifExternalFDAvailable(Aria2cFD)
class TestAria2cFD(unittest.TestCase):
    def test_make_cmd(self):
        """Check a cookie-jar tempfile is created and passed only when cookies exist."""
        with FakeYDL() as ydl:
            downloader = Aria2cFD(ydl, {})
            downloader._make_cmd('test', TEST_INFO)
            self.assertFalse(hasattr(downloader, '_cookies_tempfile'))

            # Test cookiejar tempfile arg is added
            ydl.cookiejar.set_cookie(cookiejar_Cookie(**TEST_COOKIE))
            cmd = downloader._make_cmd('test', TEST_INFO)
            self.assertIn('--load-cookies=%s' % downloader._cookies_tempfile, cmd)
|
||||
|
||||
|
||||
# Handle delegated availability
|
||||
def ifFFmpegFDAvailable(externalFD):
    """Skip decorator for FFmpegFD: availability is delegated to the
    ffmpeg post-processor rather than the downloader class itself."""
    # raise SkipTest, or set False!
    # NOTE(review): `and False` forces the initial value to False, so only
    # the FFmpegPostProcessor probe below can enable the tests - confirm
    # the ifExternalFDAvailable() call is intended purely for its side of
    # raising/validating, not its return value.
    avail = ifExternalFDAvailable(externalFD) and False
    with compat_contextlib_suppress(Exception):
        avail = FFmpegPostProcessor(downloader=None).available
    return unittest.skipUnless(
        avail, externalFD.get_basename() + ' not found')
|
||||
|
||||
|
||||
@ifFFmpegFDAvailable(FFmpegFD)
class TestFFmpegFD(unittest.TestCase):
    """Check the ffmpeg command line built by FFmpegFD, with and without cookies."""
    # last command line captured by _test_cmd
    _args = []

    def _test_cmd(self, args):
        # stand-in for _debug_cmd: record the command instead of logging it
        self._args = args

    def test_make_cmd(self):
        with FakeYDL() as ydl:
            downloader = FFmpegFD(ydl, {})
            downloader._debug_cmd = self._test_cmd
            info_dict = TEST_INFO.copy()
            info_dict['ext'] = 'mp4'

            # plain download: stream-copy the input into an mp4 container
            downloader._call_downloader('test', info_dict)
            self.assertEqual(self._args, [
                'ffmpeg', '-y', '-i', 'http://www.example.com/',
                '-c', 'copy', '-f', 'mp4', 'file:test'])

            # Test cookies arg is added
            ydl.cookiejar.set_cookie(cookiejar_Cookie(**TEST_COOKIE))
            downloader._call_downloader('test', info_dict)
            self.assertEqual(self._args, [
                'ffmpeg', '-y', '-cookies', TEST_COOKIE_VALUE + '; path=/; domain=.example.com;\r\n',
                '-i', 'http://www.example.com/', '-c', 'copy', '-f', 'mp4', 'file:test'])
|
||||
|
||||
|
||||
if __name__ == '__main__':
|
||||
unittest.main()
|
@ -0,0 +1,509 @@
|
||||
#!/usr/bin/env python
|
||||
# coding: utf-8
|
||||
from __future__ import unicode_literals
|
||||
|
||||
# Allow direct execution
|
||||
import os
|
||||
import sys
|
||||
import unittest
|
||||
sys.path.insert(0, os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
|
||||
|
||||
|
||||
import re
|
||||
|
||||
from youtube_dl.traversal import (
|
||||
dict_get,
|
||||
get_first,
|
||||
T,
|
||||
traverse_obj,
|
||||
)
|
||||
from youtube_dl.compat import (
|
||||
compat_etree_fromstring,
|
||||
compat_http_cookies,
|
||||
compat_str,
|
||||
)
|
||||
from youtube_dl.utils import (
|
||||
int_or_none,
|
||||
str_or_none,
|
||||
)
|
||||
|
||||
# Fixture for the traverse_obj() tests: mixed key types, nested
# containers, and deliberately falsy/None members to exercise pruning.
_TEST_DATA = {
    100: 100,
    1.2: 1.2,
    'str': 'str',
    'None': None,
    '...': Ellipsis,
    'urls': [
        {'index': 0, 'url': 'https://www.example.com/0'},
        {'index': 1, 'url': 'https://www.example.com/1'},
    ],
    'data': (
        {'index': 2},
        {'index': 3},
    ),
    'dict': {},
}
|
||||
|
||||
|
||||
if sys.version_info < (3, 0):
    class _TestCase(unittest.TestCase):
        # Python 2's TestCase names this method assertItemsEqual; alias it
        # so the tests can use the Python 3 name throughout.
        def assertCountEqual(self, *args, **kwargs):
            return self.assertItemsEqual(*args, **kwargs)
else:
    _TestCase = unittest.TestCase
|
||||
|
||||
|
||||
class TestTraversal(_TestCase):
|
||||
def assertMaybeCountEqual(self, *args, **kwargs):
|
||||
if sys.version_info < (3, 7):
|
||||
# random dict order
|
||||
return self.assertCountEqual(*args, **kwargs)
|
||||
else:
|
||||
return self.assertEqual(*args, **kwargs)
|
||||
|
||||
def test_traverse_obj(self):
|
||||
# instant compat
|
||||
str = compat_str
|
||||
|
||||
# define a pukka Iterable
|
||||
def iter_range(stop):
|
||||
for from_ in range(stop):
|
||||
yield from_
|
||||
|
||||
# Test base functionality
|
||||
self.assertEqual(traverse_obj(_TEST_DATA, ('str',)), 'str',
|
||||
msg='allow tuple path')
|
||||
self.assertEqual(traverse_obj(_TEST_DATA, ['str']), 'str',
|
||||
msg='allow list path')
|
||||
self.assertEqual(traverse_obj(_TEST_DATA, (value for value in ("str",))), 'str',
|
||||
msg='allow iterable path')
|
||||
self.assertEqual(traverse_obj(_TEST_DATA, 'str'), 'str',
|
||||
msg='single items should be treated as a path')
|
||||
self.assertEqual(traverse_obj(_TEST_DATA, None), _TEST_DATA)
|
||||
self.assertEqual(traverse_obj(_TEST_DATA, 100), 100)
|
||||
self.assertEqual(traverse_obj(_TEST_DATA, 1.2), 1.2)
|
||||
|
||||
# Test Ellipsis behavior
|
||||
self.assertCountEqual(traverse_obj(_TEST_DATA, Ellipsis),
|
||||
(item for item in _TEST_DATA.values() if item not in (None, {})),
|
||||
msg='`...` should give all non-discarded values')
|
||||
self.assertCountEqual(traverse_obj(_TEST_DATA, ('urls', 0, Ellipsis)), _TEST_DATA['urls'][0].values(),
|
||||
msg='`...` selection for dicts should select all values')
|
||||
self.assertEqual(traverse_obj(_TEST_DATA, (Ellipsis, Ellipsis, 'url')),
|
||||
['https://www.example.com/0', 'https://www.example.com/1'],
|
||||
msg='nested `...` queries should work')
|
||||
self.assertCountEqual(traverse_obj(_TEST_DATA, (Ellipsis, Ellipsis, 'index')), iter_range(4),
|
||||
msg='`...` query result should be flattened')
|
||||
self.assertEqual(traverse_obj(iter(range(4)), Ellipsis), list(range(4)),
|
||||
msg='`...` should accept iterables')
|
||||
|
||||
# Test function as key
|
||||
self.assertEqual(traverse_obj(_TEST_DATA, lambda x, y: x == 'urls' and isinstance(y, list)),
|
||||
[_TEST_DATA['urls']],
|
||||
msg='function as query key should perform a filter based on (key, value)')
|
||||
self.assertCountEqual(traverse_obj(_TEST_DATA, lambda _, x: isinstance(x[0], str)), set(('str',)),
|
||||
msg='exceptions in the query function should be caught')
|
||||
self.assertEqual(traverse_obj(iter(range(4)), lambda _, x: x % 2 == 0), [0, 2],
|
||||
msg='function key should accept iterables')
|
||||
if __debug__:
|
||||
with self.assertRaises(Exception, msg='Wrong function signature should raise in debug'):
|
||||
traverse_obj(_TEST_DATA, lambda a: Ellipsis)
|
||||
with self.assertRaises(Exception, msg='Wrong function signature should raise in debug'):
|
||||
traverse_obj(_TEST_DATA, lambda a, b, c: Ellipsis)
|
||||
|
||||
# Test set as key (transformation/type, like `expected_type`)
|
||||
self.assertEqual(traverse_obj(_TEST_DATA, (Ellipsis, T(str.upper), )), ['STR'],
|
||||
msg='Function in set should be a transformation')
|
||||
self.assertEqual(traverse_obj(_TEST_DATA, ('fail', T(lambda _: 'const'))), 'const',
|
||||
msg='Function in set should always be called')
|
||||
self.assertEqual(traverse_obj(_TEST_DATA, (Ellipsis, T(str))), ['str'],
|
||||
msg='Type in set should be a type filter')
|
||||
self.assertMaybeCountEqual(traverse_obj(_TEST_DATA, (Ellipsis, T(str, int))), [100, 'str'],
|
||||
msg='Multiple types in set should be a type filter')
|
||||
self.assertEqual(traverse_obj(_TEST_DATA, T(dict)), _TEST_DATA,
|
||||
msg='A single set should be wrapped into a path')
|
||||
self.assertEqual(traverse_obj(_TEST_DATA, (Ellipsis, T(str.upper))), ['STR'],
|
||||
msg='Transformation function should not raise')
|
||||
self.assertMaybeCountEqual(traverse_obj(_TEST_DATA, (Ellipsis, T(str_or_none))),
|
||||
[item for item in map(str_or_none, _TEST_DATA.values()) if item is not None],
|
||||
msg='Function in set should be a transformation')
|
||||
if __debug__:
|
||||
with self.assertRaises(Exception, msg='Sets with length != 1 should raise in debug'):
|
||||
traverse_obj(_TEST_DATA, set())
|
||||
with self.assertRaises(Exception, msg='Sets with length != 1 should raise in debug'):
|
||||
traverse_obj(_TEST_DATA, set((str.upper, str)))
|
||||
|
||||
# Test `slice` as a key
|
||||
_SLICE_DATA = [0, 1, 2, 3, 4]
|
||||
self.assertEqual(traverse_obj(_TEST_DATA, ('dict', slice(1))), None,
|
||||
msg='slice on a dictionary should not throw')
|
||||
self.assertEqual(traverse_obj(_SLICE_DATA, slice(1)), _SLICE_DATA[:1],
|
||||
msg='slice key should apply slice to sequence')
|
||||
self.assertEqual(traverse_obj(_SLICE_DATA, slice(1, 2)), _SLICE_DATA[1:2],
|
||||
msg='slice key should apply slice to sequence')
|
||||
self.assertEqual(traverse_obj(_SLICE_DATA, slice(1, 4, 2)), _SLICE_DATA[1:4:2],
|
||||
msg='slice key should apply slice to sequence')
|
||||
|
||||
# Test alternative paths
|
||||
self.assertEqual(traverse_obj(_TEST_DATA, 'fail', 'str'), 'str',
|
||||
msg='multiple `paths` should be treated as alternative paths')
|
||||
self.assertEqual(traverse_obj(_TEST_DATA, 'str', 100), 'str',
|
||||
msg='alternatives should exit early')
|
||||
self.assertEqual(traverse_obj(_TEST_DATA, 'fail', 'fail'), None,
|
||||
msg='alternatives should return `default` if exhausted')
|
||||
self.assertEqual(traverse_obj(_TEST_DATA, (Ellipsis, 'fail'), 100), 100,
|
||||
msg='alternatives should track their own branching return')
|
||||
self.assertEqual(traverse_obj(_TEST_DATA, ('dict', Ellipsis), ('data', Ellipsis)), list(_TEST_DATA['data']),
|
||||
msg='alternatives on empty objects should search further')
|
||||
|
||||
# Test branch and path nesting
|
||||
self.assertEqual(traverse_obj(_TEST_DATA, ('urls', (3, 0), 'url')), ['https://www.example.com/0'],
|
||||
msg='tuple as key should be treated as branches')
|
||||
self.assertEqual(traverse_obj(_TEST_DATA, ('urls', [3, 0], 'url')), ['https://www.example.com/0'],
|
||||
msg='list as key should be treated as branches')
|
||||
self.assertEqual(traverse_obj(_TEST_DATA, ('urls', ((1, 'fail'), (0, 'url')))), ['https://www.example.com/0'],
|
||||
msg='double nesting in path should be treated as paths')
|
||||
self.assertEqual(traverse_obj(['0', [1, 2]], [(0, 1), 0]), [1],
|
||||
msg='do not fail early on branching')
|
||||
self.assertCountEqual(traverse_obj(_TEST_DATA, ('urls', ((1, ('fail', 'url')), (0, 'url')))),
|
||||
['https://www.example.com/0', 'https://www.example.com/1'],
|
||||
msg='triple nesting in path should be treated as branches')
|
||||
self.assertEqual(traverse_obj(_TEST_DATA, ('urls', ('fail', (Ellipsis, 'url')))),
|
||||
['https://www.example.com/0', 'https://www.example.com/1'],
|
||||
msg='ellipsis as branch path start gets flattened')
|
||||
|
||||
# Test dictionary as key
|
||||
self.assertEqual(traverse_obj(_TEST_DATA, {0: 100, 1: 1.2}), {0: 100, 1: 1.2},
|
||||
msg='dict key should result in a dict with the same keys')
|
||||
self.assertEqual(traverse_obj(_TEST_DATA, {0: ('urls', 0, 'url')}),
|
||||
{0: 'https://www.example.com/0'},
|
||||
msg='dict key should allow paths')
|
||||
self.assertEqual(traverse_obj(_TEST_DATA, {0: ('urls', (3, 0), 'url')}),
|
||||
{0: ['https://www.example.com/0']},
|
||||
msg='tuple in dict path should be treated as branches')
|
||||
self.assertEqual(traverse_obj(_TEST_DATA, {0: ('urls', ((1, 'fail'), (0, 'url')))}),
|
||||
{0: ['https://www.example.com/0']},
|
||||
msg='double nesting in dict path should be treated as paths')
|
||||
self.assertEqual(traverse_obj(_TEST_DATA, {0: ('urls', ((1, ('fail', 'url')), (0, 'url')))}),
|
||||
{0: ['https://www.example.com/1', 'https://www.example.com/0']},
|
||||
msg='triple nesting in dict path should be treated as branches')
|
||||
self.assertEqual(traverse_obj(_TEST_DATA, {0: 'fail'}), {},
|
||||
msg='remove `None` values when top level dict key fails')
|
||||
self.assertEqual(traverse_obj(_TEST_DATA, {0: 'fail'}, default=Ellipsis), {0: Ellipsis},
|
||||
msg='use `default` if key fails and `default`')
|
||||
self.assertEqual(traverse_obj(_TEST_DATA, {0: 'dict'}), {},
|
||||
msg='remove empty values when dict key')
|
||||
self.assertEqual(traverse_obj(_TEST_DATA, {0: 'dict'}, default=Ellipsis), {0: Ellipsis},
|
||||
msg='use `default` when dict key and a default')
|
||||
self.assertEqual(traverse_obj(_TEST_DATA, {0: {0: 'fail'}}), {},
|
||||
msg='remove empty values when nested dict key fails')
|
||||
self.assertEqual(traverse_obj(None, {0: 'fail'}), {},
|
||||
msg='default to dict if pruned')
|
||||
self.assertEqual(traverse_obj(None, {0: 'fail'}, default=Ellipsis), {0: Ellipsis},
|
||||
msg='default to dict if pruned and default is given')
|
||||
self.assertEqual(traverse_obj(_TEST_DATA, {0: {0: 'fail'}}, default=Ellipsis), {0: {0: Ellipsis}},
|
||||
msg='use nested `default` when nested dict key fails and `default`')
|
||||
self.assertEqual(traverse_obj(_TEST_DATA, {0: ('dict', Ellipsis)}), {},
|
||||
msg='remove key if branch in dict key not successful')
|
||||
|
||||
# Testing default parameter behavior
|
||||
_DEFAULT_DATA = {'None': None, 'int': 0, 'list': []}
|
||||
self.assertEqual(traverse_obj(_DEFAULT_DATA, 'fail'), None,
|
||||
msg='default value should be `None`')
|
||||
self.assertEqual(traverse_obj(_DEFAULT_DATA, 'fail', 'fail', default=Ellipsis), Ellipsis,
|
||||
msg='chained fails should result in default')
|
||||
self.assertEqual(traverse_obj(_DEFAULT_DATA, 'None', 'int'), 0,
|
||||
msg='should not short cirquit on `None`')
|
||||
self.assertEqual(traverse_obj(_DEFAULT_DATA, 'fail', default=1), 1,
|
||||
msg='invalid dict key should result in `default`')
|
||||
self.assertEqual(traverse_obj(_DEFAULT_DATA, 'None', default=1), 1,
|
||||
msg='`None` is a deliberate sentinel and should become `default`')
|
||||
self.assertEqual(traverse_obj(_DEFAULT_DATA, ('list', 10)), None,
|
||||
msg='`IndexError` should result in `default`')
|
||||
self.assertEqual(traverse_obj(_DEFAULT_DATA, (Ellipsis, 'fail'), default=1), 1,
|
||||
msg='if branched but not successful return `default` if defined, not `[]`')
|
||||
self.assertEqual(traverse_obj(_DEFAULT_DATA, (Ellipsis, 'fail'), default=None), None,
|
||||
msg='if branched but not successful return `default` even if `default` is `None`')
|
||||
self.assertEqual(traverse_obj(_DEFAULT_DATA, (Ellipsis, 'fail')), [],
|
||||
msg='if branched but not successful return `[]`, not `default`')
|
||||
self.assertEqual(traverse_obj(_DEFAULT_DATA, ('list', Ellipsis)), [],
|
||||
msg='if branched but object is empty return `[]`, not `default`')
|
||||
self.assertEqual(traverse_obj(None, Ellipsis), [],
|
||||
msg='if branched but object is `None` return `[]`, not `default`')
|
||||
self.assertEqual(traverse_obj({0: None}, (0, Ellipsis)), [],
|
||||
msg='if branched but state is `None` return `[]`, not `default`')
|
||||
|
||||
branching_paths = [
|
||||
('fail', Ellipsis),
|
||||
(Ellipsis, 'fail'),
|
||||
100 * ('fail',) + (Ellipsis,),
|
||||
(Ellipsis,) + 100 * ('fail',),
|
||||
]
|
||||
for branching_path in branching_paths:
|
||||
self.assertEqual(traverse_obj({}, branching_path), [],
|
||||
msg='if branched but state is `None`, return `[]` (not `default`)')
|
||||
self.assertEqual(traverse_obj({}, 'fail', branching_path), [],
|
||||
msg='if branching in last alternative and previous did not match, return `[]` (not `default`)')
|
||||
self.assertEqual(traverse_obj({0: 'x'}, 0, branching_path), 'x',
|
||||
msg='if branching in last alternative and previous did match, return single value')
|
||||
self.assertEqual(traverse_obj({0: 'x'}, branching_path, 0), 'x',
|
||||
msg='if branching in first alternative and non-branching path does match, return single value')
|
||||
self.assertEqual(traverse_obj({}, branching_path, 'fail'), None,
|
||||
msg='if branching in first alternative and non-branching path does not match, return `default`')
|
||||
|
||||
# Testing expected_type behavior
|
||||
_EXPECTED_TYPE_DATA = {'str': 'str', 'int': 0}
|
||||
self.assertEqual(traverse_obj(_EXPECTED_TYPE_DATA, 'str', expected_type=str),
|
||||
'str', msg='accept matching `expected_type` type')
|
||||
self.assertEqual(traverse_obj(_EXPECTED_TYPE_DATA, 'str', expected_type=int),
|
||||
None, msg='reject non-matching `expected_type` type')
|
||||
self.assertEqual(traverse_obj(_EXPECTED_TYPE_DATA, 'int', expected_type=lambda x: str(x)),
|
||||
'0', msg='transform type using type function')
|
||||
self.assertEqual(traverse_obj(_EXPECTED_TYPE_DATA, 'str', expected_type=lambda _: 1 / 0),
|
||||
None, msg='wrap expected_type function in try_call')
|
||||
self.assertEqual(traverse_obj(_EXPECTED_TYPE_DATA, Ellipsis, expected_type=str),
|
||||
['str'], msg='eliminate items that expected_type fails on')
|
||||
self.assertEqual(traverse_obj(_TEST_DATA, {0: 100, 1: 1.2}, expected_type=int),
|
||||
{0: 100}, msg='type as expected_type should filter dict values')
|
||||
self.assertEqual(traverse_obj(_TEST_DATA, {0: 100, 1: 1.2, 2: 'None'}, expected_type=str_or_none),
|
||||
{0: '100', 1: '1.2'}, msg='function as expected_type should transform dict values')
|
||||
self.assertEqual(traverse_obj(_TEST_DATA, ({0: 1.2}, 0, set((int_or_none,))), expected_type=int),
|
||||
1, msg='expected_type should not filter non-final dict values')
|
||||
self.assertEqual(traverse_obj(_TEST_DATA, {0: {0: 100, 1: 'str'}}, expected_type=int),
|
||||
{0: {0: 100}}, msg='expected_type should transform deep dict values')
|
||||
self.assertEqual(traverse_obj(_TEST_DATA, [({0: '...'}, {0: '...'})], expected_type=type(Ellipsis)),
|
||||
[{0: Ellipsis}, {0: Ellipsis}], msg='expected_type should transform branched dict values')
|
||||
self.assertEqual(traverse_obj({1: {3: 4}}, [(1, 2), 3], expected_type=int),
|
||||
[4], msg='expected_type regression for type matching in tuple branching')
|
||||
self.assertEqual(traverse_obj(_TEST_DATA, ['data', Ellipsis], expected_type=int),
|
||||
[], msg='expected_type regression for type matching in dict result')
|
||||
|
||||
# Test get_all behavior
|
||||
_GET_ALL_DATA = {'key': [0, 1, 2]}
|
||||
self.assertEqual(traverse_obj(_GET_ALL_DATA, ('key', Ellipsis), get_all=False), 0,
|
||||
msg='if not `get_all`, return only first matching value')
|
||||
self.assertEqual(traverse_obj(_GET_ALL_DATA, Ellipsis, get_all=False), [0, 1, 2],
|
||||
msg='do not overflatten if not `get_all`')
|
||||
|
||||
# Test casesense behavior
|
||||
_CASESENSE_DATA = {
|
||||
'KeY': 'value0',
|
||||
0: {
|
||||
'KeY': 'value1',
|
||||
0: {'KeY': 'value2'},
|
||||
},
|
||||
# FULLWIDTH LATIN CAPITAL LETTER K
|
||||
'\uff2bey': 'value3',
|
||||
}
|
||||
self.assertEqual(traverse_obj(_CASESENSE_DATA, 'key'), None,
|
||||
msg='dict keys should be case sensitive unless `casesense`')
|
||||
self.assertEqual(traverse_obj(_CASESENSE_DATA, 'keY',
|
||||
casesense=False), 'value0',
|
||||
msg='allow non matching key case if `casesense`')
|
||||
self.assertEqual(traverse_obj(_CASESENSE_DATA, '\uff4bey', # FULLWIDTH LATIN SMALL LETTER K
|
||||
casesense=False), 'value3',
|
||||
msg='allow non matching Unicode key case if `casesense`')
|
||||
self.assertEqual(traverse_obj(_CASESENSE_DATA, (0, ('keY',)),
|
||||
casesense=False), ['value1'],
|
||||
msg='allow non matching key case in branch if `casesense`')
|
||||
self.assertEqual(traverse_obj(_CASESENSE_DATA, (0, ((0, 'keY'),)),
|
||||
casesense=False), ['value2'],
|
||||
msg='allow non matching key case in branch path if `casesense`')
|
||||
|
||||
# Test traverse_string behavior
|
||||
_TRAVERSE_STRING_DATA = {'str': 'str', 1.2: 1.2}
|
||||
self.assertEqual(traverse_obj(_TRAVERSE_STRING_DATA, ('str', 0)), None,
|
||||
msg='do not traverse into string if not `traverse_string`')
|
||||
self.assertEqual(traverse_obj(_TRAVERSE_STRING_DATA, ('str', 0),
|
||||
_traverse_string=True), 's',
|
||||
msg='traverse into string if `traverse_string`')
|
||||
self.assertEqual(traverse_obj(_TRAVERSE_STRING_DATA, (1.2, 1),
|
||||
_traverse_string=True), '.',
|
||||
msg='traverse into converted data if `traverse_string`')
|
||||
self.assertEqual(traverse_obj(_TRAVERSE_STRING_DATA, ('str', Ellipsis),
|
||||
_traverse_string=True), 'str',
|
||||
msg='`...` should result in string (same value) if `traverse_string`')
|
||||
self.assertEqual(traverse_obj(_TRAVERSE_STRING_DATA, ('str', slice(0, None, 2)),
|
||||
_traverse_string=True), 'sr',
|
||||
msg='`slice` should result in string if `traverse_string`')
|
||||
self.assertEqual(traverse_obj(_TRAVERSE_STRING_DATA, ('str', lambda i, v: i or v == 's'),
|
||||
_traverse_string=True), 'str',
|
||||
msg='function should result in string if `traverse_string`')
|
||||
self.assertEqual(traverse_obj(_TRAVERSE_STRING_DATA, ('str', (0, 2)),
|
||||
_traverse_string=True), ['s', 'r'],
|
||||
msg='branching should result in list if `traverse_string`')
|
||||
self.assertEqual(traverse_obj({}, (0, Ellipsis), _traverse_string=True), [],
|
||||
msg='branching should result in list if `traverse_string`')
|
||||
self.assertEqual(traverse_obj({}, (0, lambda x, y: True), _traverse_string=True), [],
|
||||
msg='branching should result in list if `traverse_string`')
|
||||
self.assertEqual(traverse_obj({}, (0, slice(1)), _traverse_string=True), [],
|
||||
msg='branching should result in list if `traverse_string`')
|
||||
|
||||
# Test re.Match as input obj
|
||||
mobj = re.match(r'^0(12)(?P<group>3)(4)?$', '0123')
|
||||
self.assertEqual(traverse_obj(mobj, Ellipsis), [x for x in mobj.groups() if x is not None],
|
||||
msg='`...` on a `re.Match` should give its `groups()`')
|
||||
self.assertEqual(traverse_obj(mobj, lambda k, _: k in (0, 2)), ['0123', '3'],
|
||||
msg='function on a `re.Match` should give groupno, value starting at 0')
|
||||
self.assertEqual(traverse_obj(mobj, 'group'), '3',
|
||||
msg='str key on a `re.Match` should give group with that name')
|
||||
self.assertEqual(traverse_obj(mobj, 2), '3',
|
||||
msg='int key on a `re.Match` should give group with that name')
|
||||
self.assertEqual(traverse_obj(mobj, 'gRoUp', casesense=False), '3',
|
||||
msg='str key on a `re.Match` should respect casesense')
|
||||
self.assertEqual(traverse_obj(mobj, 'fail'), None,
|
||||
msg='failing str key on a `re.Match` should return `default`')
|
||||
self.assertEqual(traverse_obj(mobj, 'gRoUpS', casesense=False), None,
|
||||
msg='failing str key on a `re.Match` should return `default`')
|
||||
self.assertEqual(traverse_obj(mobj, 8), None,
|
||||
msg='failing int key on a `re.Match` should return `default`')
|
||||
self.assertEqual(traverse_obj(mobj, lambda k, _: k in (0, 'group')), ['0123', '3'],
|
||||
msg='function on a `re.Match` should give group name as well')
|
||||
|
||||
# Test xml.etree.ElementTree.Element as input obj
|
||||
etree = compat_etree_fromstring('''<?xml version="1.0"?>
|
||||
<data>
|
||||
<country name="Liechtenstein">
|
||||
<rank>1</rank>
|
||||
<year>2008</year>
|
||||
<gdppc>141100</gdppc>
|
||||
<neighbor name="Austria" direction="E"/>
|
||||
<neighbor name="Switzerland" direction="W"/>
|
||||
</country>
|
||||
<country name="Singapore">
|
||||
<rank>4</rank>
|
||||
<year>2011</year>
|
||||
<gdppc>59900</gdppc>
|
||||
<neighbor name="Malaysia" direction="N"/>
|
||||
</country>
|
||||
<country name="Panama">
|
||||
<rank>68</rank>
|
||||
<year>2011</year>
|
||||
<gdppc>13600</gdppc>
|
||||
<neighbor name="Costa Rica" direction="W"/>
|
||||
<neighbor name="Colombia" direction="E"/>
|
||||
</country>
|
||||
</data>''')
|
||||
self.assertEqual(traverse_obj(etree, ''), etree,
|
||||
msg='empty str key should return the element itself')
|
||||
self.assertEqual(traverse_obj(etree, 'country'), list(etree),
|
||||
msg='str key should return all children with that tag name')
|
||||
self.assertEqual(traverse_obj(etree, Ellipsis), list(etree),
|
||||
msg='`...` as key should return all children')
|
||||
self.assertEqual(traverse_obj(etree, lambda _, x: x[0].text == '4'), [etree[1]],
|
||||
msg='function as key should get element as value')
|
||||
self.assertEqual(traverse_obj(etree, lambda i, _: i == 1), [etree[1]],
|
||||
msg='function as key should get index as key')
|
||||
self.assertEqual(traverse_obj(etree, 0), etree[0],
|
||||
msg='int key should return the nth child')
|
||||
self.assertEqual(traverse_obj(etree, './/neighbor/@name'),
|
||||
['Austria', 'Switzerland', 'Malaysia', 'Costa Rica', 'Colombia'],
|
||||
msg='`@<attribute>` at end of path should give that attribute')
|
||||
self.assertEqual(traverse_obj(etree, '//neighbor/@fail'), [None, None, None, None, None],
|
||||
msg='`@<nonexistent>` at end of path should give `None`')
|
||||
self.assertEqual(traverse_obj(etree, ('//neighbor/@', 2)), {'name': 'Malaysia', 'direction': 'N'},
|
||||
msg='`@` should give the full attribute dict')
|
||||
self.assertEqual(traverse_obj(etree, '//year/text()'), ['2008', '2011', '2011'],
|
||||
msg='`text()` at end of path should give the inner text')
|
||||
self.assertEqual(traverse_obj(etree, '//*[@direction]/@direction'), ['E', 'W', 'N', 'W', 'E'],
|
||||
msg='full python xpath features should be supported')
|
||||
self.assertEqual(traverse_obj(etree, (0, '@name')), 'Liechtenstein',
|
||||
msg='special transformations should act on current element')
|
||||
self.assertEqual(traverse_obj(etree, ('country', 0, Ellipsis, 'text()', T(int_or_none))), [1, 2008, 141100],
|
||||
msg='special transformations should act on current element')
|
||||
|
||||
    def test_traversal_unbranching(self):
        """Test `all`/`any` as path elements in traverse_obj().

        Both consume the current (possibly branched) results and collapse
        them to a single value: `all` yields the matches as a list, `any`
        yields only the first match; both filter out `None`/empty values
        and reset the branching state for subsequent path elements.
        """
        self.assertEqual(traverse_obj(_TEST_DATA, [(100, 1.2), all]), [100, 1.2],
                         msg='`all` should give all results as list')
        self.assertEqual(traverse_obj(_TEST_DATA, [(100, 1.2), any]), 100,
                         msg='`any` should give the first result')
        self.assertEqual(traverse_obj(_TEST_DATA, [100, all]), [100],
                         msg='`all` should give list if non branching')
        self.assertEqual(traverse_obj(_TEST_DATA, [100, any]), 100,
                         msg='`any` should give single item if non branching')
        self.assertEqual(traverse_obj(_TEST_DATA, [('dict', 'None', 100), all]), [100],
                         msg='`all` should filter `None` and empty dict')
        self.assertEqual(traverse_obj(_TEST_DATA, [('dict', 'None', 100), any]), 100,
                         msg='`any` should filter `None` and empty dict')
        # `all`/`any` inside a dict-shaped path apply per key
        self.assertEqual(traverse_obj(_TEST_DATA, [{
            'all': [('dict', 'None', 100, 1.2), all],
            'any': [('dict', 'None', 100, 1.2), any],
        }]), {'all': [100, 1.2], 'any': 100},
            msg='`all`/`any` should apply to each dict path separately')
        self.assertEqual(traverse_obj(_TEST_DATA, [{
            'all': [('dict', 'None', 100, 1.2), all],
            'any': [('dict', 'None', 100, 1.2), any],
        }], get_all=False), {'all': [100, 1.2], 'any': 100},
            msg='`all`/`any` should apply to dict regardless of `get_all`')
        # after `all`/`any` the result is unbranched, so a failing
        # transformation gives `None`, not `[]`
        self.assertIs(traverse_obj(_TEST_DATA, [('dict', 'None', 100, 1.2), all, T(float)]), None,
                      msg='`all` should reset branching status')
        self.assertIs(traverse_obj(_TEST_DATA, [('dict', 'None', 100, 1.2), any, T(float)]), None,
                      msg='`any` should reset branching status')
        self.assertEqual(traverse_obj(_TEST_DATA, [('dict', 'None', 100, 1.2), all, Ellipsis, T(float)]), [1.2],
                         msg='`all` should allow further branching')
        self.assertEqual(traverse_obj(_TEST_DATA, [('dict', 'None', 'urls', 'data'), any, Ellipsis, 'index']), [0, 1],
                         msg='`any` should allow further branching')
|
||||
|
||||
    def test_traversal_morsel(self):
        """Test traverse_obj() over a `Morsel` cookie object.

        A Morsel is dict-like, so string keys, `...` and function keys
        should all work, and traversal must not coerce it to a plain dict.
        """
        values = {
            'expires': 'a',
            'path': 'b',
            'comment': 'c',
            'domain': 'd',
            'max-age': 'e',
            'secure': 'f',
            'httponly': 'g',
            'version': 'h',
            'samesite': 'i',
        }
        # SameSite added in Py3.8, breaks .update for 3.5-3.7
        if sys.version_info < (3, 8):
            del values['samesite']
        morsel = compat_http_cookies.Morsel()
        # str(...) presumably forces a native-str key on Python 2 under
        # unicode_literals — verify against this file's __future__ imports
        morsel.set(str('item_key'), 'item_value', 'coded_value')
        morsel.update(values)
        values['key'] = str('item_key')
        values['value'] = 'item_value'
        values = dict((str(k), v) for k, v in values.items())
        # make test pass even without ordered dict
        value_set = set(values.values())

        for key, value in values.items():
            self.assertEqual(traverse_obj(morsel, key), value,
                             msg='Morsel should provide access to all values')
        self.assertEqual(set(traverse_obj(morsel, Ellipsis)), value_set,
                         msg='`...` should yield all values')
        self.assertEqual(set(traverse_obj(morsel, lambda k, v: True)), value_set,
                         msg='function key should yield all values')
        self.assertIs(traverse_obj(morsel, [(None,), any]), morsel,
                      msg='Morsel should not be implicitly changed to dict on usage')
|
||||
|
||||
def test_get_first(self):
|
||||
self.assertEqual(get_first([{'a': None}, {'a': 'spam'}], 'a'), 'spam')
|
||||
|
||||
def test_dict_get(self):
|
||||
FALSE_VALUES = {
|
||||
'none': None,
|
||||
'false': False,
|
||||
'zero': 0,
|
||||
'empty_string': '',
|
||||
'empty_list': [],
|
||||
}
|
||||
d = FALSE_VALUES.copy()
|
||||
d['a'] = 42
|
||||
self.assertEqual(dict_get(d, 'a'), 42)
|
||||
self.assertEqual(dict_get(d, 'b'), None)
|
||||
self.assertEqual(dict_get(d, 'b', 42), 42)
|
||||
self.assertEqual(dict_get(d, ('a', )), 42)
|
||||
self.assertEqual(dict_get(d, ('b', 'a', )), 42)
|
||||
self.assertEqual(dict_get(d, ('b', 'c', 'a', 'd', )), 42)
|
||||
self.assertEqual(dict_get(d, ('b', 'c', )), None)
|
||||
self.assertEqual(dict_get(d, ('b', 'c', ), 42), 42)
|
||||
for key, false_value in FALSE_VALUES.items():
|
||||
self.assertEqual(dict_get(d, ('b', 'c', key, )), None)
|
||||
self.assertEqual(dict_get(d, ('b', 'c', key, ), skip_false_values=False), false_value)
|
||||
|
||||
|
||||
if __name__ == '__main__':
|
||||
unittest.main()
|
@ -0,0 +1,26 @@
|
||||
#!/usr/bin/env python
|
||||
from __future__ import unicode_literals
|
||||
|
||||
# Allow direct execution
|
||||
import os
|
||||
import sys
|
||||
import unittest
|
||||
sys.path.insert(0, os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
|
||||
|
||||
|
||||
from youtube_dl.extractor import YoutubeIE
|
||||
|
||||
|
||||
class TestYoutubeMisc(unittest.TestCase):
    def test_youtube_extract(self):
        """YoutubeIE.extract_id() should find the video ID in various URL shapes."""
        # Data-driven cases instead of an assigned lambda (PEP 8 E731)
        for url, video_id in [
            ('http://www.youtube.com/watch?&v=BaW_jenozKc', 'BaW_jenozKc'),
            ('https://www.youtube.com/watch?&v=BaW_jenozKc', 'BaW_jenozKc'),
            ('https://www.youtube.com/watch?feature=player_embedded&v=BaW_jenozKc', 'BaW_jenozKc'),
            ('https://www.youtube.com/watch_popup?v=BaW_jenozKc', 'BaW_jenozKc'),
            ('http://www.youtube.com/watch?v=BaW_jenozKcsharePLED17F32AD9753930', 'BaW_jenozKc'),
            ('BaW_jenozKc', 'BaW_jenozKc'),
        ]:
            self.assertEqual(YoutubeIE.extract_id(url), video_id)
|
||||
|
||||
|
||||
if __name__ == '__main__':
|
||||
unittest.main()
|
@ -0,0 +1,35 @@
|
||||
<?xml version="1.0"?>
|
||||
<!-- MPD file Generated with GPAC version 1.0.1-revrelease at 2021-11-27T20:53:11.690Z -->
|
||||
<MPD xmlns="urn:mpeg:dash:schema:mpd:2011" minBufferTime="PT1.500S" type="static" mediaPresentationDuration="PT0H0M30.196S" maxSegmentDuration="PT0H0M10.027S" profiles="urn:mpeg:dash:profile:full:2011">
|
||||
<ProgramInformation moreInformationURL="http://gpac.io">
|
||||
<Title>manifest.mpd generated by GPAC</Title>
|
||||
</ProgramInformation>
|
||||
|
||||
<Period duration="PT0H0M30.196S">
|
||||
<AdaptationSet segmentAlignment="true" maxWidth="768" maxHeight="432" maxFrameRate="30000/1001" par="16:9" lang="und" startWithSAP="1">
|
||||
<Representation id="1" mimeType="video/mp4" codecs="avc1.4D401E" width="768" height="432" frameRate="30000/1001" sar="1:1" bandwidth="526987">
|
||||
<BaseURL>video_dashinit.mp4</BaseURL>
|
||||
<SegmentList timescale="90000" duration="900000">
|
||||
<Initialization range="0-881"/>
|
||||
<SegmentURL mediaRange="882-876094" indexRange="882-925"/>
|
||||
<SegmentURL mediaRange="876095-1466732" indexRange="876095-876138"/>
|
||||
<SegmentURL mediaRange="1466733-1953615" indexRange="1466733-1466776"/>
|
||||
<SegmentURL mediaRange="1953616-1994211" indexRange="1953616-1953659"/>
|
||||
</SegmentList>
|
||||
</Representation>
|
||||
</AdaptationSet>
|
||||
<AdaptationSet segmentAlignment="true" lang="und" startWithSAP="1">
|
||||
<Representation id="2" mimeType="audio/mp4" codecs="mp4a.40.2" audioSamplingRate="48000" bandwidth="98096">
|
||||
<AudioChannelConfiguration schemeIdUri="urn:mpeg:dash:23003:3:audio_channel_configuration:2011" value="2"/>
|
||||
<BaseURL>audio_dashinit.mp4</BaseURL>
|
||||
<SegmentList timescale="48000" duration="480000">
|
||||
<Initialization range="0-752"/>
|
||||
<SegmentURL mediaRange="753-124129" indexRange="753-796"/>
|
||||
<SegmentURL mediaRange="124130-250544" indexRange="124130-124173"/>
|
||||
<SegmentURL mediaRange="250545-374929" indexRange="250545-250588"/>
|
||||
</SegmentList>
|
||||
</Representation>
|
||||
</AdaptationSet>
|
||||
</Period>
|
||||
</MPD>
|
||||
|
@ -0,0 +1,351 @@
|
||||
<?xml version="1.0" encoding="utf-8"?>
|
||||
<!-- Created with Unified Streaming Platform (version=1.10.18-20255) -->
|
||||
<MPD
|
||||
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
|
||||
xmlns="urn:mpeg:dash:schema:mpd:2011"
|
||||
xsi:schemaLocation="urn:mpeg:dash:schema:mpd:2011 http://standards.iso.org/ittf/PubliclyAvailableStandards/MPEG-DASH_schema_files/DASH-MPD.xsd"
|
||||
type="static"
|
||||
mediaPresentationDuration="PT14M48S"
|
||||
maxSegmentDuration="PT1M"
|
||||
minBufferTime="PT10S"
|
||||
profiles="urn:mpeg:dash:profile:isoff-live:2011">
|
||||
<Period
|
||||
id="1"
|
||||
duration="PT14M48S">
|
||||
<BaseURL>dash/</BaseURL>
|
||||
<AdaptationSet
|
||||
id="1"
|
||||
group="1"
|
||||
contentType="audio"
|
||||
segmentAlignment="true"
|
||||
audioSamplingRate="48000"
|
||||
mimeType="audio/mp4"
|
||||
codecs="mp4a.40.2"
|
||||
startWithSAP="1">
|
||||
<AudioChannelConfiguration
|
||||
schemeIdUri="urn:mpeg:dash:23003:3:audio_channel_configuration:2011"
|
||||
value="2" />
|
||||
<Role schemeIdUri="urn:mpeg:dash:role:2011" value="main" />
|
||||
<SegmentTemplate
|
||||
timescale="48000"
|
||||
initialization="3144-kZT4LWMQw6Rh7Kpd-$RepresentationID$.dash"
|
||||
media="3144-kZT4LWMQw6Rh7Kpd-$RepresentationID$-$Time$.dash">
|
||||
<SegmentTimeline>
|
||||
<S t="0" d="96256" r="2" />
|
||||
<S d="95232" />
|
||||
<S d="96256" r="2" />
|
||||
<S d="95232" />
|
||||
<S d="96256" r="2" />
|
||||
<S d="95232" />
|
||||
<S d="96256" r="2" />
|
||||
<S d="95232" />
|
||||
<S d="96256" r="2" />
|
||||
<S d="95232" />
|
||||
<S d="96256" r="2" />
|
||||
<S d="95232" />
|
||||
<S d="96256" r="2" />
|
||||
<S d="95232" />
|
||||
<S d="96256" r="2" />
|
||||
<S d="95232" />
|
||||
<S d="96256" r="2" />
|
||||
<S d="95232" />
|
||||
<S d="96256" r="2" />
|
||||
<S d="95232" />
|
||||
<S d="96256" r="2" />
|
||||
<S d="95232" />
|
||||
<S d="96256" r="2" />
|
||||
<S d="95232" />
|
||||
<S d="96256" r="2" />
|
||||
<S d="95232" />
|
||||
<S d="96256" r="2" />
|
||||
<S d="95232" />
|
||||
<S d="96256" r="2" />
|
||||
<S d="95232" />
|
||||
<S d="96256" r="2" />
|
||||
<S d="95232" />
|
||||
<S d="96256" r="2" />
|
||||
<S d="95232" />
|
||||
<S d="96256" r="2" />
|
||||
<S d="95232" />
|
||||
<S d="96256" r="2" />
|
||||
<S d="95232" />
|
||||
<S d="96256" r="2" />
|
||||
<S d="95232" />
|
||||
<S d="96256" r="2" />
|
||||
<S d="95232" />
|
||||
<S d="96256" r="2" />
|
||||
<S d="95232" />
|
||||
<S d="96256" r="2" />
|
||||
<S d="95232" />
|
||||
<S d="96256" r="2" />
|
||||
<S d="95232" />
|
||||
<S d="96256" r="2" />
|
||||
<S d="95232" />
|
||||
<S d="96256" r="2" />
|
||||
<S d="95232" />
|
||||
<S d="96256" r="2" />
|
||||
<S d="95232" />
|
||||
<S d="96256" r="2" />
|
||||
<S d="95232" />
|
||||
<S d="96256" r="2" />
|
||||
<S d="95232" />
|
||||
<S d="96256" r="2" />
|
||||
<S d="95232" />
|
||||
<S d="96256" r="2" />
|
||||
<S d="95232" />
|
||||
<S d="96256" r="2" />
|
||||
<S d="95232" />
|
||||
<S d="96256" r="2" />
|
||||
<S d="95232" />
|
||||
<S d="96256" r="2" />
|
||||
<S d="95232" />
|
||||
<S d="96256" r="2" />
|
||||
<S d="95232" />
|
||||
<S d="96256" r="2" />
|
||||
<S d="95232" />
|
||||
<S d="96256" r="2" />
|
||||
<S d="95232" />
|
||||
<S d="96256" r="2" />
|
||||
<S d="95232" />
|
||||
<S d="96256" r="2" />
|
||||
<S d="95232" />
|
||||
<S d="96256" r="2" />
|
||||
<S d="95232" />
|
||||
<S d="96256" r="2" />
|
||||
<S d="95232" />
|
||||
<S d="96256" r="2" />
|
||||
<S d="95232" />
|
||||
<S d="96256" r="2" />
|
||||
<S d="95232" />
|
||||
<S d="96256" r="2" />
|
||||
<S d="95232" />
|
||||
<S d="96256" r="2" />
|
||||
<S d="95232" />
|
||||
<S d="96256" r="2" />
|
||||
<S d="95232" />
|
||||
<S d="96256" r="2" />
|
||||
<S d="95232" />
|
||||
<S d="96256" r="2" />
|
||||
<S d="95232" />
|
||||
<S d="96256" r="2" />
|
||||
<S d="95232" />
|
||||
<S d="96256" r="2" />
|
||||
<S d="95232" />
|
||||
<S d="96256" r="2" />
|
||||
<S d="95232" />
|
||||
<S d="96256" r="2" />
|
||||
<S d="95232" />
|
||||
<S d="96256" r="2" />
|
||||
<S d="95232" />
|
||||
<S d="96256" r="2" />
|
||||
<S d="95232" />
|
||||
<S d="96256" r="2" />
|
||||
<S d="95232" />
|
||||
<S d="96256" r="2" />
|
||||
<S d="95232" />
|
||||
<S d="96256" r="2" />
|
||||
<S d="95232" />
|
||||
<S d="96256" r="2" />
|
||||
<S d="95232" />
|
||||
<S d="96256" r="2" />
|
||||
<S d="95232" />
|
||||
<S d="96256" r="2" />
|
||||
<S d="95232" />
|
||||
<S d="96256" r="2" />
|
||||
<S d="95232" />
|
||||
<S d="96256" r="2" />
|
||||
<S d="95232" />
|
||||
<S d="96256" r="2" />
|
||||
<S d="95232" />
|
||||
<S d="96256" r="2" />
|
||||
<S d="95232" />
|
||||
<S d="96256" r="2" />
|
||||
<S d="95232" />
|
||||
<S d="96256" r="2" />
|
||||
<S d="95232" />
|
||||
<S d="96256" r="2" />
|
||||
<S d="95232" />
|
||||
<S d="96256" r="2" />
|
||||
<S d="95232" />
|
||||
<S d="96256" r="2" />
|
||||
<S d="95232" />
|
||||
<S d="96256" r="2" />
|
||||
<S d="95232" />
|
||||
<S d="96256" r="2" />
|
||||
<S d="95232" />
|
||||
<S d="96256" r="2" />
|
||||
<S d="95232" />
|
||||
<S d="96256" r="2" />
|
||||
<S d="95232" />
|
||||
<S d="96256" r="2" />
|
||||
<S d="95232" />
|
||||
<S d="96256" r="2" />
|
||||
<S d="95232" />
|
||||
<S d="96256" r="2" />
|
||||
<S d="95232" />
|
||||
<S d="96256" r="2" />
|
||||
<S d="95232" />
|
||||
<S d="96256" r="2" />
|
||||
<S d="95232" />
|
||||
<S d="96256" r="2" />
|
||||
<S d="95232" />
|
||||
<S d="96256" r="2" />
|
||||
<S d="95232" />
|
||||
<S d="96256" r="2" />
|
||||
<S d="95232" />
|
||||
<S d="96256" r="2" />
|
||||
<S d="95232" />
|
||||
<S d="96256" r="2" />
|
||||
<S d="95232" />
|
||||
<S d="96256" r="2" />
|
||||
<S d="95232" />
|
||||
<S d="96256" r="2" />
|
||||
<S d="95232" />
|
||||
<S d="96256" r="2" />
|
||||
<S d="95232" />
|
||||
<S d="96256" r="2" />
|
||||
<S d="95232" />
|
||||
<S d="96256" r="2" />
|
||||
<S d="95232" />
|
||||
<S d="96256" r="2" />
|
||||
<S d="95232" />
|
||||
<S d="96256" r="2" />
|
||||
<S d="95232" />
|
||||
<S d="96256" r="2" />
|
||||
<S d="95232" />
|
||||
<S d="96256" r="2" />
|
||||
<S d="95232" />
|
||||
<S d="96256" r="2" />
|
||||
<S d="95232" />
|
||||
<S d="96256" r="2" />
|
||||
<S d="95232" />
|
||||
<S d="96256" r="2" />
|
||||
<S d="95232" />
|
||||
<S d="96256" r="2" />
|
||||
<S d="95232" />
|
||||
<S d="96256" r="2" />
|
||||
<S d="95232" />
|
||||
<S d="96256" r="2" />
|
||||
<S d="95232" />
|
||||
<S d="96256" r="2" />
|
||||
<S d="95232" />
|
||||
<S d="96256" r="2" />
|
||||
<S d="95232" />
|
||||
<S d="96256" r="2" />
|
||||
<S d="95232" />
|
||||
<S d="96256" r="2" />
|
||||
<S d="95232" />
|
||||
<S d="96256" r="2" />
|
||||
<S d="95232" />
|
||||
<S d="96256" r="2" />
|
||||
<S d="95232" />
|
||||
<S d="96256" r="2" />
|
||||
<S d="95232" />
|
||||
<S d="96256" r="2" />
|
||||
<S d="95232" />
|
||||
<S d="96256" r="2" />
|
||||
<S d="95232" />
|
||||
<S d="96256" r="2" />
|
||||
<S d="95232" />
|
||||
<S d="96256" r="2" />
|
||||
<S d="95232" />
|
||||
<S d="96256" r="2" />
|
||||
<S d="95232" />
|
||||
<S d="96256" r="2" />
|
||||
<S d="95232" />
|
||||
<S d="3584" />
|
||||
</SegmentTimeline>
|
||||
</SegmentTemplate>
|
||||
<Representation
|
||||
id="audio=128001"
|
||||
bandwidth="128001">
|
||||
</Representation>
|
||||
</AdaptationSet>
|
||||
<AdaptationSet
|
||||
id="2"
|
||||
group="3"
|
||||
contentType="text"
|
||||
lang="en"
|
||||
mimeType="application/mp4"
|
||||
codecs="stpp"
|
||||
startWithSAP="1">
|
||||
<Role schemeIdUri="urn:mpeg:dash:role:2011" value="subtitle" />
|
||||
<SegmentTemplate
|
||||
timescale="1000"
|
||||
initialization="3144-kZT4LWMQw6Rh7Kpd-$RepresentationID$.dash"
|
||||
media="3144-kZT4LWMQw6Rh7Kpd-$RepresentationID$-$Time$.dash">
|
||||
<SegmentTimeline>
|
||||
<S t="0" d="60000" r="9" />
|
||||
<S d="24000" />
|
||||
</SegmentTimeline>
|
||||
</SegmentTemplate>
|
||||
<Representation
|
||||
id="textstream_eng=1000"
|
||||
bandwidth="1000">
|
||||
</Representation>
|
||||
</AdaptationSet>
|
||||
<AdaptationSet
|
||||
id="3"
|
||||
group="2"
|
||||
contentType="video"
|
||||
par="960:409"
|
||||
minBandwidth="100000"
|
||||
maxBandwidth="4482000"
|
||||
maxWidth="1689"
|
||||
maxHeight="720"
|
||||
segmentAlignment="true"
|
||||
mimeType="video/mp4"
|
||||
codecs="avc1.4D401F"
|
||||
startWithSAP="1">
|
||||
<Role schemeIdUri="urn:mpeg:dash:role:2011" value="main" />
|
||||
<SegmentTemplate
|
||||
timescale="12288"
|
||||
initialization="3144-kZT4LWMQw6Rh7Kpd-$RepresentationID$.dash"
|
||||
media="3144-kZT4LWMQw6Rh7Kpd-$RepresentationID$-$Time$.dash">
|
||||
<SegmentTimeline>
|
||||
<S t="0" d="24576" r="443" />
|
||||
</SegmentTimeline>
|
||||
</SegmentTemplate>
|
||||
<Representation
|
||||
id="video=100000"
|
||||
bandwidth="100000"
|
||||
width="336"
|
||||
height="144"
|
||||
sar="2880:2863"
|
||||
scanType="progressive">
|
||||
</Representation>
|
||||
<Representation
|
||||
id="video=326000"
|
||||
bandwidth="326000"
|
||||
width="562"
|
||||
height="240"
|
||||
sar="115200:114929"
|
||||
scanType="progressive">
|
||||
</Representation>
|
||||
<Representation
|
||||
id="video=698000"
|
||||
bandwidth="698000"
|
||||
width="844"
|
||||
height="360"
|
||||
sar="86400:86299"
|
||||
scanType="progressive">
|
||||
</Representation>
|
||||
<Representation
|
||||
id="video=1493000"
|
||||
bandwidth="1493000"
|
||||
width="1126"
|
||||
height="480"
|
||||
sar="230400:230267"
|
||||
scanType="progressive">
|
||||
</Representation>
|
||||
<Representation
|
||||
id="video=4482000"
|
||||
bandwidth="4482000"
|
||||
width="1688"
|
||||
height="720"
|
||||
sar="86400:86299"
|
||||
scanType="progressive">
|
||||
</Representation>
|
||||
</AdaptationSet>
|
||||
</Period>
|
||||
</MPD>
|
@ -0,0 +1,32 @@
|
||||
<?xml version="1.0" ?>
|
||||
<MPD xmlns="urn:mpeg:dash:schema:mpd:2011" profiles="urn:mpeg:dash:profile:isoff-live:2011" minBufferTime="PT10.01S" mediaPresentationDuration="PT30.097S" type="static">
|
||||
<!-- Created with Bento4 mp4-dash.py, VERSION=2.0.0-639 -->
|
||||
<Period>
|
||||
<!-- Video -->
|
||||
<AdaptationSet mimeType="video/mp4" segmentAlignment="true" startWithSAP="1" maxWidth="768" maxHeight="432">
|
||||
<Representation id="video-avc1" codecs="avc1.4D401E" width="768" height="432" scanType="progressive" frameRate="30000/1001" bandwidth="699597">
|
||||
<SegmentList timescale="1000" duration="10010">
|
||||
<Initialization sourceURL="video-frag.mp4" range="36-746"/>
|
||||
<SegmentURL media="video-frag.mp4" mediaRange="747-876117"/>
|
||||
<SegmentURL media="video-frag.mp4" mediaRange="876118-1466913"/>
|
||||
<SegmentURL media="video-frag.mp4" mediaRange="1466914-1953954"/>
|
||||
<SegmentURL media="video-frag.mp4" mediaRange="1953955-1994652"/>
|
||||
</SegmentList>
|
||||
</Representation>
|
||||
</AdaptationSet>
|
||||
<!-- Audio -->
|
||||
<AdaptationSet mimeType="audio/mp4" startWithSAP="1" segmentAlignment="true">
|
||||
<Representation id="audio-und-mp4a.40.2" codecs="mp4a.40.2" bandwidth="98808" audioSamplingRate="48000">
|
||||
<AudioChannelConfiguration schemeIdUri="urn:mpeg:mpegB:cicp:ChannelConfiguration" value="2"/>
|
||||
<SegmentList timescale="1000" duration="10010">
|
||||
<Initialization sourceURL="audio-frag.mp4" range="32-623"/>
|
||||
<SegmentURL media="audio-frag.mp4" mediaRange="624-124199"/>
|
||||
<SegmentURL media="audio-frag.mp4" mediaRange="124200-250303"/>
|
||||
<SegmentURL media="audio-frag.mp4" mediaRange="250304-374365"/>
|
||||
<SegmentURL media="audio-frag.mp4" mediaRange="374366-374836"/>
|
||||
</SegmentList>
|
||||
</Representation>
|
||||
</AdaptationSet>
|
||||
</Period>
|
||||
</MPD>
|
||||
|
File diff suppressed because it is too large
Load Diff
File diff suppressed because it is too large
Load Diff
@ -0,0 +1,66 @@
|
||||
# coding: utf-8
|
||||
from __future__ import unicode_literals
|
||||
|
||||
try:
|
||||
import threading
|
||||
except ImportError:
|
||||
threading = None
|
||||
|
||||
from .common import FileDownloader
|
||||
from ..downloader import get_suitable_downloader
|
||||
from ..extractor.niconico import NiconicoIE
|
||||
from ..utils import sanitized_Request
|
||||
|
||||
|
||||
class NiconicoDmcFD(FileDownloader):
    """ Downloading niconico douga from DMC with heartbeat """

    FD_NAME = 'niconico_dmc'

    def real_download(self, filename, info_dict):
        """Download a DMC stream while keeping the session alive.

        A background `threading.Timer` chain re-POSTs the heartbeat data
        every `interval` seconds until the inner downloader finishes.
        Returns the inner downloader's success flag.
        """
        self.to_screen('[%s] Downloading from DMC' % self.FD_NAME)

        # The extractor rewrites info_dict and supplies the heartbeat
        # parameters (url, data, interval, ping callback)
        ie = NiconicoIE(self.ydl)
        info_dict, heartbeat_info_dict = ie._get_heartbeat_info(info_dict)

        fd = get_suitable_downloader(info_dict, params=self.params)(self.ydl, self.params)
        for ph in self._progress_hooks:
            fd.add_progress_hook(ph)

        if not threading:
            # No threads available: skip the heartbeat and download anyway
            self.to_screen('[%s] Threading for Heartbeat not available' % self.FD_NAME)
            return fd.real_download(filename, info_dict)

        success = download_complete = False
        # 1-element list so heartbeat() can rebind the current timer
        # (no `nonlocal` on Python 2)
        timer = [None]
        heartbeat_lock = threading.Lock()
        heartbeat_url = heartbeat_info_dict['url']
        heartbeat_data = heartbeat_info_dict['data'].encode()
        heartbeat_interval = heartbeat_info_dict.get('interval', 30)

        request = sanitized_Request(heartbeat_url, heartbeat_data)

        def heartbeat():
            # Best-effort keep-alive: a failed ping is reported but does
            # not abort the download
            try:
                self.ydl.urlopen(request).read()
            except Exception:
                self.to_screen('[%s] Heartbeat failed' % self.FD_NAME)

            with heartbeat_lock:
                if not download_complete:
                    timer[0] = threading.Timer(heartbeat_interval, heartbeat)
                    timer[0].start()

        heartbeat_info_dict['ping']()
        self.to_screen('[%s] Heartbeat with %d second interval ...' % (self.FD_NAME, heartbeat_interval))
        try:
            heartbeat()
            if type(fd).__name__ == 'HlsFD':
                info_dict.update(ie._extract_m3u8_formats(info_dict['url'], info_dict['id'])[0])
            success = fd.real_download(filename, info_dict)
        finally:
            # The lock always exists here, so take it directly (the old
            # `if heartbeat_lock:` pre-check was always true); guard the
            # cancel in case no timer was ever armed
            with heartbeat_lock:
                if timer[0] is not None:
                    timer[0].cancel()
                download_complete = True
        return success
|
@ -0,0 +1,89 @@
|
||||
# coding: utf-8
|
||||
from __future__ import unicode_literals
|
||||
|
||||
from .common import InfoExtractor
|
||||
from ..utils import (
|
||||
clean_html,
|
||||
dict_get,
|
||||
get_element_by_class,
|
||||
int_or_none,
|
||||
unified_strdate,
|
||||
url_or_none,
|
||||
)
|
||||
|
||||
|
||||
class Alsace20TVIE(InfoExtractor):
    _VALID_URL = r'https?://(?:www\.)?alsace20\.tv/(?:[\w-]+/)+[\w-]+-(?P<id>[\w]+)'
    _TESTS = [{
        'url': 'https://www.alsace20.tv/VOD/Actu/JT/Votre-JT-jeudi-3-fevrier-lyNHCXpYJh.html',
        # 'md5': 'd91851bf9af73c0ad9b2cdf76c127fbb',
        'info_dict': {
            'id': 'lyNHCXpYJh',
            'ext': 'mp4',
            'description': 'md5:fc0bc4a0692d3d2dba4524053de4c7b7',
            'title': 'Votre JT du jeudi 3 février',
            'upload_date': '20220203',
            'thumbnail': r're:https?://.+\.jpg',
            'duration': 1073,
            'view_count': int,
        },
        'params': {
            'format': 'bestvideo',
        },
    }]

    def _extract_video(self, video_id, url=None):
        """Extract a video from the site's JSON player ("visionneuse") API.

        `url` is the original page URL, if known; it is scraped (best
        effort) for description, duration and a fallback thumbnail.
        """
        info = self._download_json(
            'https://www.alsace20.tv/visionneuse/visio_v9_js.php?key=%s&habillage=0&mode=html' % (video_id, ),
            video_id) or {}
        title = info['titre']

        formats = []
        # 'files' maps a resolution label to either a SMIL or an MPD URL
        for res, fmt_url in (info.get('files') or {}).items():
            formats.extend(
                self._extract_smil_formats(fmt_url, video_id, fatal=False)
                if '/smil:_' in fmt_url
                else self._extract_mpd_formats(fmt_url, video_id, mpd_id=res, fatal=False))
        self._sort_formats(formats)

        webpage = (url and self._download_webpage(url, video_id, fatal=False)) or ''
        thumbnail = url_or_none(dict_get(info, ('image', 'preview', )) or self._og_search_thumbnail(webpage))
        # thumbnail may be None (no API image and no og:thumbnail):
        # search an empty string instead to avoid a TypeError
        upload_date = self._search_regex(r'/(\d{6})_', thumbnail or '', 'upload_date', default=None)
        # thumbnail path embeds the date as YYMMDD
        upload_date = unified_strdate('20%s-%s-%s' % (upload_date[:2], upload_date[2:4], upload_date[4:])) if upload_date else None
        return {
            'id': video_id,
            'title': title,
            'formats': formats,
            'description': clean_html(get_element_by_class('wysiwyg', webpage)),
            'upload_date': upload_date,
            'thumbnail': thumbnail,
            'duration': int_or_none(self._og_search_property('video:duration', webpage) if webpage else None),
            'view_count': int_or_none(info.get('nb_vues')),
        }

    def _real_extract(self, url):
        video_id = self._match_id(url)
        return self._extract_video(video_id, url)
|
||||
|
||||
|
||||
class Alsace20TVEmbedIE(Alsace20TVIE):
    # Embedded player pages (/emb/<id>): reuse the parent's extraction
    # but without an original page URL to scrape for extra metadata
    _VALID_URL = r'https?://(?:www\.)?alsace20\.tv/emb/(?P<id>[\w]+)'
    _TESTS = [{
        'url': 'https://www.alsace20.tv/emb/lyNHCXpYJh',
        # 'md5': 'd91851bf9af73c0ad9b2cdf76c127fbb',
        'info_dict': {
            'id': 'lyNHCXpYJh',
            'ext': 'mp4',
            'title': 'Votre JT du jeudi 3 février',
            'upload_date': '20220203',
            'thumbnail': r're:https?://.+\.jpg',
            'view_count': int,
        },
        'params': {
            'format': 'bestvideo',
        },
    }]

    def _real_extract(self, url):
        video_id = self._match_id(url)
        # no `url` argument: the embed page has no scrapeable metadata,
        # so description/duration are omitted
        return self._extract_video(video_id)
|
@ -0,0 +1,103 @@
|
||||
# coding: utf-8
|
||||
from __future__ import unicode_literals
|
||||
|
||||
from .common import InfoExtractor
|
||||
from .youtube import YoutubeIE
|
||||
from .vimeo import VimeoIE
|
||||
from ..utils import (
|
||||
int_or_none,
|
||||
parse_iso8601,
|
||||
update_url_query,
|
||||
)
|
||||
|
||||
|
||||
class AmaraIE(InfoExtractor):
    # amara.org hosts community subtitles for videos that live elsewhere
    # (YouTube, Vimeo, or a direct media URL)
    _VALID_URL = r'https?://(?:www\.)?amara\.org/(?:\w+/)?videos/(?P<id>\w+)'
    _TESTS = [{
        # Youtube
        'url': 'https://amara.org/en/videos/jVx79ZKGK1ky/info/why-jury-trials-are-becoming-less-common/?tab=video',
        'md5': 'ea10daf2b6154b8c1ecf9922aca5e8ae',
        'info_dict': {
            'id': 'h6ZuVdvYnfE',
            'ext': 'mp4',
            'title': 'Why jury trials are becoming less common',
            'description': 'md5:a61811c319943960b6ab1c23e0cbc2c1',
            'thumbnail': r're:^https?://.*\.jpg$',
            'subtitles': dict,
            'upload_date': '20160813',
            'uploader': 'PBS NewsHour',
            'uploader_id': 'PBSNewsHour',
            'timestamp': 1549639570,
        }
    }, {
        # Vimeo
        'url': 'https://amara.org/en/videos/kYkK1VUTWW5I/info/vimeo-at-ces-2011',
        'md5': '99392c75fa05d432a8f11df03612195e',
        'info_dict': {
            'id': '18622084',
            'ext': 'mov',
            'title': 'Vimeo at CES 2011!',
            'description': 'md5:d41d8cd98f00b204e9800998ecf8427e',
            'thumbnail': r're:^https?://.*\.jpg$',
            'subtitles': dict,
            'timestamp': 1294763658,
            'upload_date': '20110111',
            'uploader': 'Sam Morrill',
            'uploader_id': 'sammorrill'
        }
    }, {
        # Direct Link
        'url': 'https://amara.org/en/videos/s8KL7I3jLmh6/info/the-danger-of-a-single-story/',
        'md5': 'd3970f08512738ee60c5807311ff5d3f',
        'info_dict': {
            'id': 's8KL7I3jLmh6',
            'ext': 'mp4',
            'title': 'The danger of a single story',
            'description': 'md5:d769b31139c3b8bb5be9177f62ea3f23',
            'thumbnail': r're:^https?://.*\.jpg$',
            'subtitles': dict,
            'upload_date': '20091007',
            'timestamp': 1254942511,
        }
    }]

    def _real_extract(self, url):
        video_id = self._match_id(url)
        # metadata API: title, source URLs, languages with subtitle URIs
        meta = self._download_json(
            'https://amara.org/api/videos/%s/' % video_id,
            video_id, query={'format': 'json'})
        title = meta['title']
        # first source URL is taken as the video; may be a YouTube/Vimeo
        # page or a direct media link
        video_url = meta['all_urls'][0]

        subtitles = {}
        for language in (meta.get('languages') or []):
            subtitles_uri = language.get('subtitles_uri')
            # skip languages with no subtitles or unpublished ones
            if not (subtitles_uri and language.get('published')):
                continue
            subtitle = subtitles.setdefault(language.get('code') or 'en', [])
            # the same URI serves several formats via a query parameter
            for f in ('json', 'srt', 'vtt'):
                subtitle.append({
                    'ext': f,
                    'url': update_url_query(subtitles_uri, {'format': f}),
                })

        info = {
            'url': video_url,
            'id': video_id,
            'subtitles': subtitles,
            'title': title,
            'description': meta.get('description'),
            'thumbnail': meta.get('thumbnail'),
            'duration': int_or_none(meta.get('duration')),
            'timestamp': parse_iso8601(meta.get('created')),
        }

        # delegate to the YouTube/Vimeo extractor when the source URL
        # belongs to one of them, keeping Amara's metadata (url_transparent)
        for ie in (YoutubeIE, VimeoIE):
            if ie.suitable(video_url):
                info.update({
                    '_type': 'url_transparent',
                    'ie_key': ie.ie_key(),
                })
                break

        return info
|
@ -0,0 +1,93 @@
|
||||
# coding: utf-8
|
||||
from __future__ import unicode_literals
|
||||
|
||||
from .common import InfoExtractor
|
||||
from ..utils import (
|
||||
clean_html,
|
||||
clean_podcast_url,
|
||||
get_element_by_class,
|
||||
int_or_none,
|
||||
parse_codecs,
|
||||
parse_iso8601,
|
||||
try_get,
|
||||
)
|
||||
|
||||
|
||||
class ApplePodcastsIE(InfoExtractor):
    # Extractor for single podcast episodes on podcasts.apple.com.
    # The episode ID is the numeric `i` query parameter of the page URL.
    _VALID_URL = r'https?://podcasts\.apple\.com/(?:[^/]+/)?podcast(?:/[^/]+){1,2}.*?\bi=(?P<id>\d+)'
    _TESTS = [{
        'url': 'https://podcasts.apple.com/us/podcast/207-whitney-webb-returns/id1135137367?i=1000482637777',
        'md5': '41dc31cd650143e530d9423b6b5a344f',
        'info_dict': {
            'id': '1000482637777',
            'ext': 'mp3',
            'title': '207 - Whitney Webb Returns',
            'description': 'md5:75ef4316031df7b41ced4e7b987f79c6',
            'upload_date': '20200705',
            'timestamp': 1593932400,
            'duration': 6454,
            'series': 'The Tim Dillon Show',
            'thumbnail': 're:.+[.](png|jpe?g|webp)',
        }
    }, {
        'url': 'https://podcasts.apple.com/podcast/207-whitney-webb-returns/id1135137367?i=1000482637777',
        'only_matching': True,
    }, {
        'url': 'https://podcasts.apple.com/podcast/207-whitney-webb-returns?i=1000482637777',
        'only_matching': True,
    }, {
        'url': 'https://podcasts.apple.com/podcast/id1135137367?i=1000482637777',
        'only_matching': True,
    }]

    def _real_extract(self, url):
        episode_id = self._match_id(url)
        webpage = self._download_webpage(url, episode_id)
        episode_data = {}
        ember_data = {}
        # new page type 2021-11
        # The page embeds a "shoebox" cache of API responses as JSON inside
        # a <script id="shoebox-media-api-cache-amp-podcasts"> element.
        amp_data = self._parse_json(self._search_regex(
            r'(?s)id="shoebox-media-api-cache-amp-podcasts"[^>]*>\s*({.+?})\s*<',
            webpage, 'AMP data', default='{}'), episode_id, fatal=False) or {}
        # The cache maps request-URL-like keys to JSON-encoded response
        # strings; pick the entry whose key mentions this episode ID and
        # parse that inner JSON. try_get() swallows a missing match.
        amp_data = try_get(amp_data,
                           lambda a: self._parse_json(
                               next(a[x] for x in iter(a) if episode_id in x),
                               episode_id),
                           dict) or {}
        # 'd' holds the list of typed resource objects in the response.
        amp_data = amp_data.get('d') or []
        # Find the resource describing this episode itself.
        episode_data = try_get(
            amp_data,
            lambda a: next(x for x in a
                           if x['type'] == 'podcast-episodes' and x['id'] == episode_id),
            dict)
        if not episode_data:
            # try pre 2021-11 page type: TODO: consider deleting if no longer used
            ember_data = self._parse_json(self._search_regex(
                r'(?s)id="shoebox-ember-data-store"[^>]*>\s*({.+?})\s*<',
                webpage, 'ember data'), episode_id) or {}
            # The ember store may be keyed by episode ID or be the record
            # directly; normalise to the record.
            ember_data = ember_data.get(episode_id) or ember_data
            episode_data = try_get(ember_data, lambda x: x['data'], dict)
        episode = episode_data['attributes']
        description = episode.get('description') or {}

        # Series (show) name: prefer the 'media/podcast' resource bundled
        # with either page type, falling back to the visible page header.
        series = None
        for inc in (amp_data or ember_data.get('included') or []):
            if inc.get('type') == 'media/podcast':
                series = try_get(inc, lambda x: x['attributes']['name'])
        series = series or clean_html(get_element_by_class('podcast-header__identity', webpage))

        info = [{
            'id': episode_id,
            'title': episode['name'],
            'url': clean_podcast_url(episode['assetUrl']),
            'description': description.get('standard') or description.get('short'),
            'timestamp': parse_iso8601(episode.get('releaseDateTime')),
            # durationInMilliseconds scaled down to whole seconds
            'duration': int_or_none(episode.get('durationInMilliseconds'), 1000),
            'series': series,
            'thumbnail': self._og_search_thumbnail(webpage),
        }]
        # NOTE(review): _sort_formats() is normally applied to a list of
        # format dicts; here the single info dict stands in for its own
        # format entry — looks intentional, confirm against _sort_formats.
        self._sort_formats(info)
        info = info[0]
        # NOTE(review): parse_codecs() is fed the file extension (default
        # 'mp3') rather than a codecs string — presumably relies on
        # parse_codecs handling plain codec names; verify.
        codecs = parse_codecs(info.get('ext', 'mp3'))
        info.update(codecs)
        return info
|
@ -0,0 +1,174 @@
|
||||
# coding: utf-8
|
||||
from __future__ import unicode_literals
|
||||
|
||||
import re
|
||||
|
||||
from .common import InfoExtractor
|
||||
from ..utils import (
|
||||
extract_attributes,
|
||||
int_or_none,
|
||||
parse_iso8601,
|
||||
try_get,
|
||||
)
|
||||
|
||||
|
||||
class ArcPublishingIE(InfoExtractor):
    # Extractor for videos served through the Arc Publishing platform
    # ("POWA" player), used by many news sites. Videos are addressed with
    # an internal URL scheme: arcpublishing:<org>:<uuid>.
    _UUID_REGEX = r'[\da-f]{8}-(?:[\da-f]{4}-){3}[\da-f]{12}'
    _VALID_URL = r'arcpublishing:(?P<org>[a-z]+):(?P<id>%s)' % _UUID_REGEX
    _TESTS = [{
        # https://www.adn.com/politics/2020/11/02/video-senate-candidates-campaign-in-anchorage-on-eve-of-election-day/
        'url': 'arcpublishing:adn:8c99cb6e-b29c-4bc9-9173-7bf9979225ab',
        'only_matching': True,
    }, {
        # https://www.bostonglobe.com/video/2020/12/30/metro/footage-released-showing-officer-talking-about-striking-protesters-with-car/
        'url': 'arcpublishing:bostonglobe:232b7ae6-7d73-432d-bc0a-85dbf0119ab1',
        'only_matching': True,
    }, {
        # https://www.actionnewsjax.com/video/live-stream/
        'url': 'arcpublishing:cmg:cfb1cf1b-3ab5-4d1b-86c5-a5515d311f2a',
        'only_matching': True,
    }, {
        # https://elcomercio.pe/videos/deportes/deporte-total-futbol-peruano-seleccion-peruana-la-valorizacion-de-los-peruanos-en-el-exterior-tras-un-2020-atipico-nnav-vr-video-noticia/
        'url': 'arcpublishing:elcomercio:27a7e1f8-2ec7-4177-874f-a4feed2885b3',
        'only_matching': True,
    }, {
        # https://www.clickondetroit.com/video/community/2020/05/15/events-surrounding-woodward-dream-cruise-being-canceled/
        'url': 'arcpublishing:gmg:c8793fb2-8d44-4242-881e-2db31da2d9fe',
        'only_matching': True,
    }, {
        # https://www.wabi.tv/video/2020/12/30/trenton-company-making-equipment-pfizer-covid-vaccine/
        'url': 'arcpublishing:gray:0b0ba30e-032a-4598-8810-901d70e6033e',
        'only_matching': True,
    }, {
        # https://www.lateja.cr/el-mundo/video-china-aprueba-con-condiciones-su-primera/dfcbfa57-527f-45ff-a69b-35fe71054143/video/
        'url': 'arcpublishing:gruponacion:dfcbfa57-527f-45ff-a69b-35fe71054143',
        'only_matching': True,
    }, {
        # https://www.fifthdomain.com/video/2018/03/09/is-america-vulnerable-to-a-cyber-attack/
        'url': 'arcpublishing:mco:aa0ca6fe-1127-46d4-b32c-be0d6fdb8055',
        'only_matching': True,
    }, {
        # https://www.vl.no/kultur/2020/12/09/en-melding-fra-en-lytter-endret-julelista-til-lewi-bergrud/
        'url': 'arcpublishing:mentormedier:47a12084-650b-4011-bfd0-3699b6947b2d',
        'only_matching': True,
    }, {
        # https://www.14news.com/2020/12/30/whiskey-theft-caught-camera-henderson-liquor-store/
        'url': 'arcpublishing:raycom:b89f61f8-79fa-4c09-8255-e64237119bf7',
        'only_matching': True,
    }, {
        # https://www.theglobeandmail.com/world/video-ethiopian-woman-who-became-symbol-of-integration-in-italy-killed-on/
        'url': 'arcpublishing:tgam:411b34c1-8701-4036-9831-26964711664b',
        'only_matching': True,
    }, {
        # https://www.pilotonline.com/460f2931-8130-4719-8ea1-ffcb2d7cb685-132.html
        'url': 'arcpublishing:tronc:460f2931-8130-4719-8ea1-ffcb2d7cb685',
        'only_matching': True,
    }]
    # Known org slugs mapped to their API host template; orgs not listed
    # here fall back to the generic template in _real_extract().
    _POWA_DEFAULTS = [
        (['cmg', 'prisa'], '%s-config-prod.api.cdn.arcpublishing.com/video'),
        ([
            'adn', 'advancelocal', 'answers', 'bonnier', 'bostonglobe', 'demo',
            'gmg', 'gruponacion', 'infobae', 'mco', 'nzme', 'pmn', 'raycom',
            'spectator', 'tbt', 'tgam', 'tronc', 'wapo', 'wweek',
        ], 'video-api-cdn.%s.arcpublishing.com/api'),
    ]

    @staticmethod
    def _extract_urls(webpage):
        """Return arcpublishing: URLs for every embedded POWA player div
        found in *webpage* (identified by class "powa" plus data-org and
        data-uuid attributes)."""
        entries = []
        # https://arcpublishing.atlassian.net/wiki/spaces/POWA/overview
        for powa_el in re.findall(r'(<div[^>]+class="[^"]*\bpowa\b[^"]*"[^>]+data-uuid="%s"[^>]*>)' % ArcPublishingIE._UUID_REGEX, webpage):
            powa = extract_attributes(powa_el) or {}
            org = powa.get('data-org')
            uuid = powa.get('data-uuid')
            if org and uuid:
                entries.append('arcpublishing:%s:%s' % (org, uuid))
        return entries

    def _real_extract(self, url):
        org, uuid = re.match(self._VALID_URL, url).groups()
        # Resolve the API host for this org; unknown orgs use the generic
        # per-org production CDN template.
        for orgs, tmpl in self._POWA_DEFAULTS:
            if org in orgs:
                base_api_tmpl = tmpl
                break
        else:
            base_api_tmpl = '%s-prod-cdn.video-api.arcpublishing.com/api'
            # 'wapo' embeds use the 'washpost' subdomain on the generic host
            if org == 'wapo':
                org = 'washpost'
        video = self._download_json(
            'https://%s/v1/ansvideos/findByUuid' % (base_api_tmpl % org),
            uuid, query={'uuid': uuid})[0]
        title = video['headlines']['basic']
        is_live = video.get('status') == 'live'

        urls = []   # stream URLs already processed, for de-duplication
        formats = []
        for s in video.get('streams', []):
            s_url = s.get('url')
            if not s_url or s_url in urls:
                continue
            urls.append(s_url)
            stream_type = s.get('stream_type')
            if stream_type == 'smil':
                smil_formats = self._extract_smil_formats(
                    s_url, uuid, fatal=False)
                for f in smil_formats:
                    # Fix up RTMP formats: split app/play_path at /cfx/st,
                    # ensure the mp4: play_path prefix, and convert a
                    # fractional tbr (in Mbit-style units) to an integer vbr.
                    if f['url'].endswith('/cfx/st'):
                        f['app'] = 'cfx/st'
                        if not f['play_path'].startswith('mp4:'):
                            f['play_path'] = 'mp4:' + f['play_path']
                    if isinstance(f['tbr'], float):
                        f['vbr'] = f['tbr'] * 1000
                        del f['tbr']
                        f['format_id'] = 'rtmp-%d' % f['vbr']
                formats.extend(smil_formats)
            elif stream_type in ('ts', 'hls'):
                m3u8_formats = self._extract_m3u8_formats(
                    s_url, uuid, 'mp4', 'm3u8' if is_live else 'm3u8_native',
                    m3u8_id='hls', fatal=False)
                # Skip playlists that contain no audio at all.
                if all([f.get('acodec') == 'none' for f in m3u8_formats]):
                    continue
                for f in m3u8_formats:
                    # Demote video-only and (further) audio-only variants.
                    if f.get('acodec') == 'none':
                        f['preference'] = -40
                    elif f.get('vcodec') == 'none':
                        f['preference'] = -50
                    height = f.get('height')
                    if not height:
                        continue
                    # Bitrate is often encoded in the URL after the height,
                    # e.g. ..._720_2500... — recover it when present.
                    vbr = self._search_regex(
                        r'[_x]%d[_-](\d+)' % height, f['url'], 'vbr', default=None)
                    if vbr:
                        f['vbr'] = int(vbr)
                formats.extend(m3u8_formats)
            else:
                # Progressive download (or unknown type): take the stream's
                # own metadata at face value.
                vbr = int_or_none(s.get('bitrate'))
                formats.append({
                    'format_id': '%s-%d' % (stream_type, vbr) if vbr else stream_type,
                    'vbr': vbr,
                    'width': int_or_none(s.get('width')),
                    'height': int_or_none(s.get('height')),
                    'filesize': int_or_none(s.get('filesize')),
                    'url': s_url,
                    'preference': -1,
                })
        # Custom sort order so the preference demotions above dominate.
        self._sort_formats(
            formats, ('preference', 'width', 'height', 'vbr', 'filesize', 'tbr', 'ext', 'format_id'))

        # All subtitle tracks are filed under 'en'; the API does not appear
        # to expose a per-track language here.
        subtitles = {}
        for subtitle in (try_get(video, lambda x: x['subtitles']['urls'], list) or []):
            subtitle_url = subtitle.get('url')
            if subtitle_url:
                subtitles.setdefault('en', []).append({'url': subtitle_url})

        return {
            'id': uuid,
            'title': self._live_title(title) if is_live else title,
            'thumbnail': try_get(video, lambda x: x['promo_image']['url']),
            'description': try_get(video, lambda x: x['subheadlines']['basic']),
            'formats': formats,
            # duration appears to be reported in 1/100 s units (scaled to
            # seconds here) — TODO confirm against the ANS video schema
            'duration': int_or_none(video.get('duration'), 100),
            'timestamp': parse_iso8601(video.get('created_date')),
            'subtitles': subtitles,
            'is_live': is_live,
        }
|
Some files were not shown because too many files have changed in this diff Show More
Loading…
Reference in New Issue