Compare commits: master...2021.02.22
No commits in common. 'master' and '2021.02.22' have entirely different histories.
@@ -1 +0,0 @@
blank_issues_enabled: false

@@ -1 +0,0 @@
# Empty file needed to make devscripts.utils properly importable from outside

@@ -1,83 +0,0 @@
#!/usr/bin/env python
# coding: utf-8

from __future__ import unicode_literals

"""
This script displays the API parameters corresponding to a yt-dl command line

Example:

$ ./cli_to_api.py -f best
{u'format': 'best'}
$
"""

# Allow direct execution
import os
import sys
sys.path.insert(0, os.path.dirname(os.path.dirname(os.path.abspath(__file__))))

import youtube_dl
from types import MethodType


def cli_to_api(*opts):
    YDL = youtube_dl.YoutubeDL

    # to extract the parsed options, break out of YoutubeDL instantiation

    # return options via this Exception
    class ParseYTDLResult(Exception):
        def __init__(self, result):
            super(ParseYTDLResult, self).__init__('result')
            self.opts = result

    # replacement constructor that raises ParseYTDLResult
    def ytdl_init(ydl, ydl_opts):
        super(YDL, ydl).__init__(ydl_opts)
        raise ParseYTDLResult(ydl_opts)

    # patch in the constructor
    YDL.__init__ = MethodType(ytdl_init, YDL)

    # core parser
    def parsed_options(argv):
        try:
            youtube_dl._real_main(list(argv))
        except ParseYTDLResult as result:
            return result.opts

    # from https://github.com/yt-dlp/yt-dlp/issues/5859#issuecomment-1363938900
    default = parsed_options([])

    def neq_opt(a, b):
        if a == b:
            return False
        if a is None and repr(type(object)).endswith(".utils.DateRange'>"):
            return '0001-01-01 - 9999-12-31' != '{0}'.format(b)
        return a != b

    diff = dict((k, v) for k, v in parsed_options(opts).items() if neq_opt(default[k], v))
    if 'postprocessors' in diff:
        diff['postprocessors'] = [pp for pp in diff['postprocessors'] if pp not in default['postprocessors']]
    return diff


def main():
    from pprint import PrettyPrinter

    pprint = PrettyPrinter()
    super_format = pprint.format

    def format(object, context, maxlevels, level):
        if repr(type(object)).endswith(".utils.DateRange'>"):
            return '{0}: {1}>'.format(repr(object)[:-2], object), True, False
        return super_format(object, context, maxlevels, level)

    pprint.format = format

    pprint.pprint(cli_to_api(*sys.argv))


if __name__ == '__main__':
    main()

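The script above is normally driven from the command line as shown in its docstring. Calling it from Python would look roughly like the sketch below; this is not part of the diff, and the module path and the options passed are only illustrative assumptions:

    # illustration only, assuming the repo root is on sys.path
    from devscripts.cli_to_api import cli_to_api

    # translate a yt-dl command line into YoutubeDL API parameters
    params = cli_to_api('-f', 'best', '--no-part')
    print(params)  # e.g. {'format': 'best', 'nopart': True}
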
@@ -1,62 +0,0 @@
# coding: utf-8
from __future__ import unicode_literals

import argparse
import functools
import os.path
import subprocess
import sys

dirn = os.path.dirname

sys.path.insert(0, dirn(dirn(os.path.abspath(__file__))))

from youtube_dl.compat import (
    compat_kwargs,
    compat_open as open,
)


def read_file(fname):
    with open(fname, encoding='utf-8') as f:
        return f.read()


def write_file(fname, content, mode='w'):
    with open(fname, mode, encoding='utf-8') as f:
        return f.write(content)


def read_version(fname='youtube_dl/version.py'):
    """Get the version without importing the package"""
    exec(compile(read_file(fname), fname, 'exec'))
    return locals()['__version__']


def get_filename_args(has_infile=False, default_outfile=None):
    parser = argparse.ArgumentParser()
    if has_infile:
        parser.add_argument('infile', help='Input file')
    kwargs = {'nargs': '?', 'default': default_outfile} if default_outfile else {}
    kwargs['help'] = 'Output file'
    parser.add_argument('outfile', **compat_kwargs(kwargs))

    opts = parser.parse_args()
    if has_infile:
        return opts.infile, opts.outfile
    return opts.outfile


def compose_functions(*functions):
    return lambda x: functools.reduce(lambda y, f: f(y), functions, x)


def run_process(*args, **kwargs):
    kwargs.setdefault('text', True)
    kwargs.setdefault('check', True)
    kwargs.setdefault('capture_output', True)
    if kwargs['text']:
        kwargs.setdefault('encoding', 'utf-8')
        kwargs.setdefault('errors', 'replace')
    kwargs = compat_kwargs(kwargs)
    return subprocess.run(args, **kwargs)

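As a quick illustration of the two generic helpers at the end of the file above, a caller might combine them as in this sketch; the transform functions and the git command are made-up examples, not from the repository:

    # illustration only
    from devscripts.utils import compose_functions, run_process

    # functions are applied left to right: strip first, then upper-case
    clean_up = compose_functions(str.strip, str.upper)
    assert clean_up('  ok  ') == 'OK'

    # run_process defaults to text mode, check=True and captured output
    head = run_process('git', 'rev-parse', 'HEAD').stdout.strip()
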
@@ -1,272 +0,0 @@
#!/usr/bin/env python
# coding: utf-8
from __future__ import unicode_literals

# Allow direct execution
import os
import re
import sys
import subprocess
import unittest
sys.path.insert(0, os.path.dirname(os.path.dirname(os.path.abspath(__file__))))

from test.helper import (
    FakeLogger,
    FakeYDL,
    http_server_port,
    try_rm,
)
from youtube_dl import YoutubeDL
from youtube_dl.compat import (
    compat_contextlib_suppress,
    compat_http_cookiejar_Cookie,
    compat_http_server,
    compat_kwargs,
)
from youtube_dl.utils import (
    encodeFilename,
    join_nonempty,
)
from youtube_dl.downloader.external import (
    Aria2cFD,
    Aria2pFD,
    AxelFD,
    CurlFD,
    FFmpegFD,
    HttpieFD,
    WgetFD,
)
from youtube_dl.postprocessor import (
    FFmpegPostProcessor,
)
import threading

TEST_SIZE = 10 * 1024

TEST_COOKIE = {
    'version': 0,
    'name': 'test',
    'value': 'ytdlp',
    'port': None,
    'port_specified': False,
    'domain': '.example.com',
    'domain_specified': True,
    'domain_initial_dot': False,
    'path': '/',
    'path_specified': True,
    'secure': False,
    'expires': None,
    'discard': False,
    'comment': None,
    'comment_url': None,
    'rest': {},
}

TEST_COOKIE_VALUE = join_nonempty('name', 'value', delim='=', from_dict=TEST_COOKIE)

TEST_INFO = {'url': 'http://www.example.com/'}


def cookiejar_Cookie(**cookie_args):
    return compat_http_cookiejar_Cookie(**compat_kwargs(cookie_args))


def ifExternalFDAvailable(externalFD):
    return unittest.skipUnless(externalFD.available(),
                               externalFD.get_basename() + ' not found')


class HTTPTestRequestHandler(compat_http_server.BaseHTTPRequestHandler):
    def log_message(self, format, *args):
        pass

    def send_content_range(self, total=None):
        range_header = self.headers.get('Range')
        start = end = None
        if range_header:
            mobj = re.match(r'bytes=(\d+)-(\d+)', range_header)
            if mobj:
                start, end = (int(mobj.group(i)) for i in (1, 2))
        valid_range = start is not None and end is not None
        if valid_range:
            content_range = 'bytes %d-%d' % (start, end)
            if total:
                content_range += '/%d' % total
            self.send_header('Content-Range', content_range)
        return (end - start + 1) if valid_range else total

    def serve(self, range=True, content_length=True):
        self.send_response(200)
        self.send_header('Content-Type', 'video/mp4')
        size = TEST_SIZE
        if range:
            size = self.send_content_range(TEST_SIZE)
        if content_length:
            self.send_header('Content-Length', size)
        self.end_headers()
        self.wfile.write(b'#' * size)

    def do_GET(self):
        if self.path == '/regular':
            self.serve()
        elif self.path == '/no-content-length':
            self.serve(content_length=False)
        elif self.path == '/no-range':
            self.serve(range=False)
        elif self.path == '/no-range-no-content-length':
            self.serve(range=False, content_length=False)
        else:
            assert False, 'unrecognised server path'


@ifExternalFDAvailable(Aria2pFD)
class TestAria2pFD(unittest.TestCase):
    def setUp(self):
        self.httpd = compat_http_server.HTTPServer(
            ('127.0.0.1', 0), HTTPTestRequestHandler)
        self.port = http_server_port(self.httpd)
        self.server_thread = threading.Thread(target=self.httpd.serve_forever)
        self.server_thread.daemon = True
        self.server_thread.start()

    def download(self, params, ep):
        with subprocess.Popen(
            ['aria2c', '--enable-rpc'],
            stdout=subprocess.DEVNULL,
            stderr=subprocess.DEVNULL
        ) as process:
            if not process.poll():
                filename = 'testfile.mp4'
                params['logger'] = FakeLogger()
                params['outtmpl'] = filename
                ydl = YoutubeDL(params)
                try_rm(encodeFilename(filename))
                self.assertEqual(ydl.download(['http://127.0.0.1:%d/%s' % (self.port, ep)]), 0)
                self.assertEqual(os.path.getsize(encodeFilename(filename)), TEST_SIZE)
                try_rm(encodeFilename(filename))
            process.kill()

    def download_all(self, params):
        for ep in ('regular', 'no-content-length', 'no-range', 'no-range-no-content-length'):
            self.download(params, ep)

    def test_regular(self):
        self.download_all({'external_downloader': 'aria2p'})

    def test_chunked(self):
        self.download_all({
            'external_downloader': 'aria2p',
            'http_chunk_size': 1000,
        })


@ifExternalFDAvailable(HttpieFD)
class TestHttpieFD(unittest.TestCase):
    def test_make_cmd(self):
        with FakeYDL() as ydl:
            downloader = HttpieFD(ydl, {})
            self.assertEqual(
                downloader._make_cmd('test', TEST_INFO),
                ['http', '--download', '--output', 'test', 'http://www.example.com/'])

            # Test cookie header is added
            ydl.cookiejar.set_cookie(cookiejar_Cookie(**TEST_COOKIE))
            self.assertEqual(
                downloader._make_cmd('test', TEST_INFO),
                ['http', '--download', '--output', 'test',
                 'http://www.example.com/', 'Cookie:' + TEST_COOKIE_VALUE])


@ifExternalFDAvailable(AxelFD)
class TestAxelFD(unittest.TestCase):
    def test_make_cmd(self):
        with FakeYDL() as ydl:
            downloader = AxelFD(ydl, {})
            self.assertEqual(
                downloader._make_cmd('test', TEST_INFO),
                ['axel', '-o', 'test', '--', 'http://www.example.com/'])

            # Test cookie header is added
            ydl.cookiejar.set_cookie(cookiejar_Cookie(**TEST_COOKIE))
            self.assertEqual(
                downloader._make_cmd('test', TEST_INFO),
                ['axel', '-o', 'test', '-H', 'Cookie: ' + TEST_COOKIE_VALUE,
                 '--max-redirect=0', '--', 'http://www.example.com/'])


@ifExternalFDAvailable(WgetFD)
class TestWgetFD(unittest.TestCase):
    def test_make_cmd(self):
        with FakeYDL() as ydl:
            downloader = WgetFD(ydl, {})
            self.assertNotIn('--load-cookies', downloader._make_cmd('test', TEST_INFO))
            # Test cookiejar tempfile arg is added
            ydl.cookiejar.set_cookie(cookiejar_Cookie(**TEST_COOKIE))
            self.assertIn('--load-cookies', downloader._make_cmd('test', TEST_INFO))


@ifExternalFDAvailable(CurlFD)
class TestCurlFD(unittest.TestCase):
    def test_make_cmd(self):
        with FakeYDL() as ydl:
            downloader = CurlFD(ydl, {})
            self.assertNotIn('--cookie', downloader._make_cmd('test', TEST_INFO))
            # Test cookie header is added
            ydl.cookiejar.set_cookie(cookiejar_Cookie(**TEST_COOKIE))
            self.assertIn('--cookie', downloader._make_cmd('test', TEST_INFO))
            self.assertIn(TEST_COOKIE_VALUE, downloader._make_cmd('test', TEST_INFO))


@ifExternalFDAvailable(Aria2cFD)
class TestAria2cFD(unittest.TestCase):
    def test_make_cmd(self):
        with FakeYDL() as ydl:
            downloader = Aria2cFD(ydl, {})
            downloader._make_cmd('test', TEST_INFO)
            self.assertFalse(hasattr(downloader, '_cookies_tempfile'))

            # Test cookiejar tempfile arg is added
            ydl.cookiejar.set_cookie(cookiejar_Cookie(**TEST_COOKIE))
            cmd = downloader._make_cmd('test', TEST_INFO)
            self.assertIn('--load-cookies=%s' % downloader._cookies_tempfile, cmd)


# Handle delegated availability
def ifFFmpegFDAvailable(externalFD):
    # raise SkipTest, or set False!
    avail = ifExternalFDAvailable(externalFD) and False
    with compat_contextlib_suppress(Exception):
        avail = FFmpegPostProcessor(downloader=None).available
    return unittest.skipUnless(
        avail, externalFD.get_basename() + ' not found')


@ifFFmpegFDAvailable(FFmpegFD)
class TestFFmpegFD(unittest.TestCase):
    _args = []

    def _test_cmd(self, args):
        self._args = args

    def test_make_cmd(self):
        with FakeYDL() as ydl:
            downloader = FFmpegFD(ydl, {})
            downloader._debug_cmd = self._test_cmd
            info_dict = TEST_INFO.copy()
            info_dict['ext'] = 'mp4'

            downloader._call_downloader('test', info_dict)
            self.assertEqual(self._args, [
                'ffmpeg', '-y', '-i', 'http://www.example.com/',
                '-c', 'copy', '-f', 'mp4', 'file:test'])

            # Test cookies arg is added
            ydl.cookiejar.set_cookie(cookiejar_Cookie(**TEST_COOKIE))
            downloader._call_downloader('test', info_dict)
            self.assertEqual(self._args, [
                'ffmpeg', '-y', '-cookies', TEST_COOKIE_VALUE + '; path=/; domain=.example.com;\r\n',
                '-i', 'http://www.example.com/', '-c', 'copy', '-f', 'mp4', 'file:test'])


if __name__ == '__main__':
    unittest.main()

@@ -1,509 +0,0 @@
#!/usr/bin/env python
# coding: utf-8
from __future__ import unicode_literals

# Allow direct execution
import os
import sys
import unittest
sys.path.insert(0, os.path.dirname(os.path.dirname(os.path.abspath(__file__))))


import re

from youtube_dl.traversal import (
    dict_get,
    get_first,
    T,
    traverse_obj,
)
from youtube_dl.compat import (
    compat_etree_fromstring,
    compat_http_cookies,
    compat_str,
)
from youtube_dl.utils import (
    int_or_none,
    str_or_none,
)

_TEST_DATA = {
    100: 100,
    1.2: 1.2,
    'str': 'str',
    'None': None,
    '...': Ellipsis,
    'urls': [
        {'index': 0, 'url': 'https://www.example.com/0'},
        {'index': 1, 'url': 'https://www.example.com/1'},
    ],
    'data': (
        {'index': 2},
        {'index': 3},
    ),
    'dict': {},
}


if sys.version_info < (3, 0):
    class _TestCase(unittest.TestCase):

        def assertCountEqual(self, *args, **kwargs):
            return self.assertItemsEqual(*args, **kwargs)
else:
    _TestCase = unittest.TestCase


class TestTraversal(_TestCase):
    def assertMaybeCountEqual(self, *args, **kwargs):
        if sys.version_info < (3, 7):
            # random dict order
            return self.assertCountEqual(*args, **kwargs)
        else:
            return self.assertEqual(*args, **kwargs)

    def test_traverse_obj(self):
        # instant compat
        str = compat_str

        # define a pukka Iterable
        def iter_range(stop):
            for from_ in range(stop):
                yield from_

        # Test base functionality
        self.assertEqual(traverse_obj(_TEST_DATA, ('str',)), 'str',
                         msg='allow tuple path')
        self.assertEqual(traverse_obj(_TEST_DATA, ['str']), 'str',
                         msg='allow list path')
        self.assertEqual(traverse_obj(_TEST_DATA, (value for value in ("str",))), 'str',
                         msg='allow iterable path')
        self.assertEqual(traverse_obj(_TEST_DATA, 'str'), 'str',
                         msg='single items should be treated as a path')
        self.assertEqual(traverse_obj(_TEST_DATA, None), _TEST_DATA)
        self.assertEqual(traverse_obj(_TEST_DATA, 100), 100)
        self.assertEqual(traverse_obj(_TEST_DATA, 1.2), 1.2)

        # Test Ellipsis behavior
        self.assertCountEqual(traverse_obj(_TEST_DATA, Ellipsis),
                              (item for item in _TEST_DATA.values() if item not in (None, {})),
                              msg='`...` should give all non-discarded values')
        self.assertCountEqual(traverse_obj(_TEST_DATA, ('urls', 0, Ellipsis)), _TEST_DATA['urls'][0].values(),
                              msg='`...` selection for dicts should select all values')
        self.assertEqual(traverse_obj(_TEST_DATA, (Ellipsis, Ellipsis, 'url')),
                         ['https://www.example.com/0', 'https://www.example.com/1'],
                         msg='nested `...` queries should work')
        self.assertCountEqual(traverse_obj(_TEST_DATA, (Ellipsis, Ellipsis, 'index')), iter_range(4),
                              msg='`...` query result should be flattened')
        self.assertEqual(traverse_obj(iter(range(4)), Ellipsis), list(range(4)),
                         msg='`...` should accept iterables')

        # Test function as key
        self.assertEqual(traverse_obj(_TEST_DATA, lambda x, y: x == 'urls' and isinstance(y, list)),
                         [_TEST_DATA['urls']],
                         msg='function as query key should perform a filter based on (key, value)')
        self.assertCountEqual(traverse_obj(_TEST_DATA, lambda _, x: isinstance(x[0], str)), set(('str',)),
                              msg='exceptions in the query function should be caught')
        self.assertEqual(traverse_obj(iter(range(4)), lambda _, x: x % 2 == 0), [0, 2],
                         msg='function key should accept iterables')
        if __debug__:
            with self.assertRaises(Exception, msg='Wrong function signature should raise in debug'):
                traverse_obj(_TEST_DATA, lambda a: Ellipsis)
            with self.assertRaises(Exception, msg='Wrong function signature should raise in debug'):
                traverse_obj(_TEST_DATA, lambda a, b, c: Ellipsis)

        # Test set as key (transformation/type, like `expected_type`)
        self.assertEqual(traverse_obj(_TEST_DATA, (Ellipsis, T(str.upper), )), ['STR'],
                         msg='Function in set should be a transformation')
        self.assertEqual(traverse_obj(_TEST_DATA, ('fail', T(lambda _: 'const'))), 'const',
                         msg='Function in set should always be called')
        self.assertEqual(traverse_obj(_TEST_DATA, (Ellipsis, T(str))), ['str'],
                         msg='Type in set should be a type filter')
        self.assertMaybeCountEqual(traverse_obj(_TEST_DATA, (Ellipsis, T(str, int))), [100, 'str'],
                                   msg='Multiple types in set should be a type filter')
        self.assertEqual(traverse_obj(_TEST_DATA, T(dict)), _TEST_DATA,
                         msg='A single set should be wrapped into a path')
        self.assertEqual(traverse_obj(_TEST_DATA, (Ellipsis, T(str.upper))), ['STR'],
                         msg='Transformation function should not raise')
        self.assertMaybeCountEqual(traverse_obj(_TEST_DATA, (Ellipsis, T(str_or_none))),
                                   [item for item in map(str_or_none, _TEST_DATA.values()) if item is not None],
                                   msg='Function in set should be a transformation')
        if __debug__:
            with self.assertRaises(Exception, msg='Sets with length != 1 should raise in debug'):
                traverse_obj(_TEST_DATA, set())
            with self.assertRaises(Exception, msg='Sets with length != 1 should raise in debug'):
                traverse_obj(_TEST_DATA, set((str.upper, str)))

        # Test `slice` as a key
        _SLICE_DATA = [0, 1, 2, 3, 4]
        self.assertEqual(traverse_obj(_TEST_DATA, ('dict', slice(1))), None,
                         msg='slice on a dictionary should not throw')
        self.assertEqual(traverse_obj(_SLICE_DATA, slice(1)), _SLICE_DATA[:1],
                         msg='slice key should apply slice to sequence')
        self.assertEqual(traverse_obj(_SLICE_DATA, slice(1, 2)), _SLICE_DATA[1:2],
                         msg='slice key should apply slice to sequence')
        self.assertEqual(traverse_obj(_SLICE_DATA, slice(1, 4, 2)), _SLICE_DATA[1:4:2],
                         msg='slice key should apply slice to sequence')

        # Test alternative paths
        self.assertEqual(traverse_obj(_TEST_DATA, 'fail', 'str'), 'str',
                         msg='multiple `paths` should be treated as alternative paths')
        self.assertEqual(traverse_obj(_TEST_DATA, 'str', 100), 'str',
                         msg='alternatives should exit early')
        self.assertEqual(traverse_obj(_TEST_DATA, 'fail', 'fail'), None,
                         msg='alternatives should return `default` if exhausted')
        self.assertEqual(traverse_obj(_TEST_DATA, (Ellipsis, 'fail'), 100), 100,
                         msg='alternatives should track their own branching return')
        self.assertEqual(traverse_obj(_TEST_DATA, ('dict', Ellipsis), ('data', Ellipsis)), list(_TEST_DATA['data']),
                         msg='alternatives on empty objects should search further')

        # Test branch and path nesting
        self.assertEqual(traverse_obj(_TEST_DATA, ('urls', (3, 0), 'url')), ['https://www.example.com/0'],
                         msg='tuple as key should be treated as branches')
        self.assertEqual(traverse_obj(_TEST_DATA, ('urls', [3, 0], 'url')), ['https://www.example.com/0'],
                         msg='list as key should be treated as branches')
        self.assertEqual(traverse_obj(_TEST_DATA, ('urls', ((1, 'fail'), (0, 'url')))), ['https://www.example.com/0'],
                         msg='double nesting in path should be treated as paths')
        self.assertEqual(traverse_obj(['0', [1, 2]], [(0, 1), 0]), [1],
                         msg='do not fail early on branching')
        self.assertCountEqual(traverse_obj(_TEST_DATA, ('urls', ((1, ('fail', 'url')), (0, 'url')))),
                              ['https://www.example.com/0', 'https://www.example.com/1'],
                              msg='triple nesting in path should be treated as branches')
        self.assertEqual(traverse_obj(_TEST_DATA, ('urls', ('fail', (Ellipsis, 'url')))),
                         ['https://www.example.com/0', 'https://www.example.com/1'],
                         msg='ellipsis as branch path start gets flattened')

        # Test dictionary as key
        self.assertEqual(traverse_obj(_TEST_DATA, {0: 100, 1: 1.2}), {0: 100, 1: 1.2},
                         msg='dict key should result in a dict with the same keys')
        self.assertEqual(traverse_obj(_TEST_DATA, {0: ('urls', 0, 'url')}),
                         {0: 'https://www.example.com/0'},
                         msg='dict key should allow paths')
        self.assertEqual(traverse_obj(_TEST_DATA, {0: ('urls', (3, 0), 'url')}),
                         {0: ['https://www.example.com/0']},
                         msg='tuple in dict path should be treated as branches')
        self.assertEqual(traverse_obj(_TEST_DATA, {0: ('urls', ((1, 'fail'), (0, 'url')))}),
                         {0: ['https://www.example.com/0']},
                         msg='double nesting in dict path should be treated as paths')
        self.assertEqual(traverse_obj(_TEST_DATA, {0: ('urls', ((1, ('fail', 'url')), (0, 'url')))}),
                         {0: ['https://www.example.com/1', 'https://www.example.com/0']},
                         msg='triple nesting in dict path should be treated as branches')
        self.assertEqual(traverse_obj(_TEST_DATA, {0: 'fail'}), {},
                         msg='remove `None` values when top level dict key fails')
        self.assertEqual(traverse_obj(_TEST_DATA, {0: 'fail'}, default=Ellipsis), {0: Ellipsis},
                         msg='use `default` if key fails and `default`')
        self.assertEqual(traverse_obj(_TEST_DATA, {0: 'dict'}), {},
                         msg='remove empty values when dict key')
        self.assertEqual(traverse_obj(_TEST_DATA, {0: 'dict'}, default=Ellipsis), {0: Ellipsis},
                         msg='use `default` when dict key and a default')
        self.assertEqual(traverse_obj(_TEST_DATA, {0: {0: 'fail'}}), {},
                         msg='remove empty values when nested dict key fails')
        self.assertEqual(traverse_obj(None, {0: 'fail'}), {},
                         msg='default to dict if pruned')
        self.assertEqual(traverse_obj(None, {0: 'fail'}, default=Ellipsis), {0: Ellipsis},
                         msg='default to dict if pruned and default is given')
        self.assertEqual(traverse_obj(_TEST_DATA, {0: {0: 'fail'}}, default=Ellipsis), {0: {0: Ellipsis}},
                         msg='use nested `default` when nested dict key fails and `default`')
        self.assertEqual(traverse_obj(_TEST_DATA, {0: ('dict', Ellipsis)}), {},
                         msg='remove key if branch in dict key not successful')

        # Testing default parameter behavior
        _DEFAULT_DATA = {'None': None, 'int': 0, 'list': []}
        self.assertEqual(traverse_obj(_DEFAULT_DATA, 'fail'), None,
                         msg='default value should be `None`')
        self.assertEqual(traverse_obj(_DEFAULT_DATA, 'fail', 'fail', default=Ellipsis), Ellipsis,
                         msg='chained fails should result in default')
        self.assertEqual(traverse_obj(_DEFAULT_DATA, 'None', 'int'), 0,
                         msg='should not short cirquit on `None`')
        self.assertEqual(traverse_obj(_DEFAULT_DATA, 'fail', default=1), 1,
                         msg='invalid dict key should result in `default`')
        self.assertEqual(traverse_obj(_DEFAULT_DATA, 'None', default=1), 1,
                         msg='`None` is a deliberate sentinel and should become `default`')
        self.assertEqual(traverse_obj(_DEFAULT_DATA, ('list', 10)), None,
                         msg='`IndexError` should result in `default`')
        self.assertEqual(traverse_obj(_DEFAULT_DATA, (Ellipsis, 'fail'), default=1), 1,
                         msg='if branched but not successful return `default` if defined, not `[]`')
        self.assertEqual(traverse_obj(_DEFAULT_DATA, (Ellipsis, 'fail'), default=None), None,
                         msg='if branched but not successful return `default` even if `default` is `None`')
        self.assertEqual(traverse_obj(_DEFAULT_DATA, (Ellipsis, 'fail')), [],
                         msg='if branched but not successful return `[]`, not `default`')
        self.assertEqual(traverse_obj(_DEFAULT_DATA, ('list', Ellipsis)), [],
                         msg='if branched but object is empty return `[]`, not `default`')
        self.assertEqual(traverse_obj(None, Ellipsis), [],
                         msg='if branched but object is `None` return `[]`, not `default`')
        self.assertEqual(traverse_obj({0: None}, (0, Ellipsis)), [],
                         msg='if branched but state is `None` return `[]`, not `default`')

        branching_paths = [
            ('fail', Ellipsis),
            (Ellipsis, 'fail'),
            100 * ('fail',) + (Ellipsis,),
            (Ellipsis,) + 100 * ('fail',),
        ]
        for branching_path in branching_paths:
            self.assertEqual(traverse_obj({}, branching_path), [],
                             msg='if branched but state is `None`, return `[]` (not `default`)')
            self.assertEqual(traverse_obj({}, 'fail', branching_path), [],
                             msg='if branching in last alternative and previous did not match, return `[]` (not `default`)')
            self.assertEqual(traverse_obj({0: 'x'}, 0, branching_path), 'x',
                             msg='if branching in last alternative and previous did match, return single value')
            self.assertEqual(traverse_obj({0: 'x'}, branching_path, 0), 'x',
                             msg='if branching in first alternative and non-branching path does match, return single value')
            self.assertEqual(traverse_obj({}, branching_path, 'fail'), None,
                             msg='if branching in first alternative and non-branching path does not match, return `default`')

        # Testing expected_type behavior
        _EXPECTED_TYPE_DATA = {'str': 'str', 'int': 0}
        self.assertEqual(traverse_obj(_EXPECTED_TYPE_DATA, 'str', expected_type=str),
                         'str', msg='accept matching `expected_type` type')
        self.assertEqual(traverse_obj(_EXPECTED_TYPE_DATA, 'str', expected_type=int),
                         None, msg='reject non-matching `expected_type` type')
        self.assertEqual(traverse_obj(_EXPECTED_TYPE_DATA, 'int', expected_type=lambda x: str(x)),
                         '0', msg='transform type using type function')
        self.assertEqual(traverse_obj(_EXPECTED_TYPE_DATA, 'str', expected_type=lambda _: 1 / 0),
                         None, msg='wrap expected_type function in try_call')
        self.assertEqual(traverse_obj(_EXPECTED_TYPE_DATA, Ellipsis, expected_type=str),
                         ['str'], msg='eliminate items that expected_type fails on')
        self.assertEqual(traverse_obj(_TEST_DATA, {0: 100, 1: 1.2}, expected_type=int),
                         {0: 100}, msg='type as expected_type should filter dict values')
        self.assertEqual(traverse_obj(_TEST_DATA, {0: 100, 1: 1.2, 2: 'None'}, expected_type=str_or_none),
                         {0: '100', 1: '1.2'}, msg='function as expected_type should transform dict values')
        self.assertEqual(traverse_obj(_TEST_DATA, ({0: 1.2}, 0, set((int_or_none,))), expected_type=int),
                         1, msg='expected_type should not filter non-final dict values')
        self.assertEqual(traverse_obj(_TEST_DATA, {0: {0: 100, 1: 'str'}}, expected_type=int),
                         {0: {0: 100}}, msg='expected_type should transform deep dict values')
        self.assertEqual(traverse_obj(_TEST_DATA, [({0: '...'}, {0: '...'})], expected_type=type(Ellipsis)),
                         [{0: Ellipsis}, {0: Ellipsis}], msg='expected_type should transform branched dict values')
        self.assertEqual(traverse_obj({1: {3: 4}}, [(1, 2), 3], expected_type=int),
                         [4], msg='expected_type regression for type matching in tuple branching')
        self.assertEqual(traverse_obj(_TEST_DATA, ['data', Ellipsis], expected_type=int),
                         [], msg='expected_type regression for type matching in dict result')

        # Test get_all behavior
        _GET_ALL_DATA = {'key': [0, 1, 2]}
        self.assertEqual(traverse_obj(_GET_ALL_DATA, ('key', Ellipsis), get_all=False), 0,
                         msg='if not `get_all`, return only first matching value')
        self.assertEqual(traverse_obj(_GET_ALL_DATA, Ellipsis, get_all=False), [0, 1, 2],
                         msg='do not overflatten if not `get_all`')

        # Test casesense behavior
        _CASESENSE_DATA = {
            'KeY': 'value0',
            0: {
                'KeY': 'value1',
                0: {'KeY': 'value2'},
            },
            # FULLWIDTH LATIN CAPITAL LETTER K
            '\uff2bey': 'value3',
        }
        self.assertEqual(traverse_obj(_CASESENSE_DATA, 'key'), None,
                         msg='dict keys should be case sensitive unless `casesense`')
        self.assertEqual(traverse_obj(_CASESENSE_DATA, 'keY',
                                      casesense=False), 'value0',
                         msg='allow non matching key case if `casesense`')
        self.assertEqual(traverse_obj(_CASESENSE_DATA, '\uff4bey',  # FULLWIDTH LATIN SMALL LETTER K
                                      casesense=False), 'value3',
                         msg='allow non matching Unicode key case if `casesense`')
        self.assertEqual(traverse_obj(_CASESENSE_DATA, (0, ('keY',)),
                                      casesense=False), ['value1'],
                         msg='allow non matching key case in branch if `casesense`')
        self.assertEqual(traverse_obj(_CASESENSE_DATA, (0, ((0, 'keY'),)),
                                      casesense=False), ['value2'],
                         msg='allow non matching key case in branch path if `casesense`')

        # Test traverse_string behavior
        _TRAVERSE_STRING_DATA = {'str': 'str', 1.2: 1.2}
        self.assertEqual(traverse_obj(_TRAVERSE_STRING_DATA, ('str', 0)), None,
                         msg='do not traverse into string if not `traverse_string`')
        self.assertEqual(traverse_obj(_TRAVERSE_STRING_DATA, ('str', 0),
                                      _traverse_string=True), 's',
                         msg='traverse into string if `traverse_string`')
        self.assertEqual(traverse_obj(_TRAVERSE_STRING_DATA, (1.2, 1),
                                      _traverse_string=True), '.',
                         msg='traverse into converted data if `traverse_string`')
        self.assertEqual(traverse_obj(_TRAVERSE_STRING_DATA, ('str', Ellipsis),
                                      _traverse_string=True), 'str',
                         msg='`...` should result in string (same value) if `traverse_string`')
        self.assertEqual(traverse_obj(_TRAVERSE_STRING_DATA, ('str', slice(0, None, 2)),
                                      _traverse_string=True), 'sr',
                         msg='`slice` should result in string if `traverse_string`')
        self.assertEqual(traverse_obj(_TRAVERSE_STRING_DATA, ('str', lambda i, v: i or v == 's'),
                                      _traverse_string=True), 'str',
                         msg='function should result in string if `traverse_string`')
        self.assertEqual(traverse_obj(_TRAVERSE_STRING_DATA, ('str', (0, 2)),
                                      _traverse_string=True), ['s', 'r'],
                         msg='branching should result in list if `traverse_string`')
        self.assertEqual(traverse_obj({}, (0, Ellipsis), _traverse_string=True), [],
                         msg='branching should result in list if `traverse_string`')
        self.assertEqual(traverse_obj({}, (0, lambda x, y: True), _traverse_string=True), [],
                         msg='branching should result in list if `traverse_string`')
        self.assertEqual(traverse_obj({}, (0, slice(1)), _traverse_string=True), [],
                         msg='branching should result in list if `traverse_string`')

        # Test re.Match as input obj
        mobj = re.match(r'^0(12)(?P<group>3)(4)?$', '0123')
        self.assertEqual(traverse_obj(mobj, Ellipsis), [x for x in mobj.groups() if x is not None],
                         msg='`...` on a `re.Match` should give its `groups()`')
        self.assertEqual(traverse_obj(mobj, lambda k, _: k in (0, 2)), ['0123', '3'],
                         msg='function on a `re.Match` should give groupno, value starting at 0')
        self.assertEqual(traverse_obj(mobj, 'group'), '3',
                         msg='str key on a `re.Match` should give group with that name')
        self.assertEqual(traverse_obj(mobj, 2), '3',
                         msg='int key on a `re.Match` should give group with that name')
        self.assertEqual(traverse_obj(mobj, 'gRoUp', casesense=False), '3',
                         msg='str key on a `re.Match` should respect casesense')
        self.assertEqual(traverse_obj(mobj, 'fail'), None,
                         msg='failing str key on a `re.Match` should return `default`')
        self.assertEqual(traverse_obj(mobj, 'gRoUpS', casesense=False), None,
                         msg='failing str key on a `re.Match` should return `default`')
        self.assertEqual(traverse_obj(mobj, 8), None,
                         msg='failing int key on a `re.Match` should return `default`')
        self.assertEqual(traverse_obj(mobj, lambda k, _: k in (0, 'group')), ['0123', '3'],
                         msg='function on a `re.Match` should give group name as well')

        # Test xml.etree.ElementTree.Element as input obj
        etree = compat_etree_fromstring('''<?xml version="1.0"?>
        <data>
            <country name="Liechtenstein">
                <rank>1</rank>
                <year>2008</year>
                <gdppc>141100</gdppc>
                <neighbor name="Austria" direction="E"/>
                <neighbor name="Switzerland" direction="W"/>
            </country>
            <country name="Singapore">
                <rank>4</rank>
                <year>2011</year>
                <gdppc>59900</gdppc>
                <neighbor name="Malaysia" direction="N"/>
            </country>
            <country name="Panama">
                <rank>68</rank>
                <year>2011</year>
                <gdppc>13600</gdppc>
                <neighbor name="Costa Rica" direction="W"/>
                <neighbor name="Colombia" direction="E"/>
            </country>
        </data>''')
        self.assertEqual(traverse_obj(etree, ''), etree,
                         msg='empty str key should return the element itself')
        self.assertEqual(traverse_obj(etree, 'country'), list(etree),
                         msg='str key should return all children with that tag name')
        self.assertEqual(traverse_obj(etree, Ellipsis), list(etree),
                         msg='`...` as key should return all children')
        self.assertEqual(traverse_obj(etree, lambda _, x: x[0].text == '4'), [etree[1]],
                         msg='function as key should get element as value')
        self.assertEqual(traverse_obj(etree, lambda i, _: i == 1), [etree[1]],
                         msg='function as key should get index as key')
        self.assertEqual(traverse_obj(etree, 0), etree[0],
                         msg='int key should return the nth child')
        self.assertEqual(traverse_obj(etree, './/neighbor/@name'),
                         ['Austria', 'Switzerland', 'Malaysia', 'Costa Rica', 'Colombia'],
                         msg='`@<attribute>` at end of path should give that attribute')
        self.assertEqual(traverse_obj(etree, '//neighbor/@fail'), [None, None, None, None, None],
                         msg='`@<nonexistent>` at end of path should give `None`')
        self.assertEqual(traverse_obj(etree, ('//neighbor/@', 2)), {'name': 'Malaysia', 'direction': 'N'},
                         msg='`@` should give the full attribute dict')
        self.assertEqual(traverse_obj(etree, '//year/text()'), ['2008', '2011', '2011'],
                         msg='`text()` at end of path should give the inner text')
        self.assertEqual(traverse_obj(etree, '//*[@direction]/@direction'), ['E', 'W', 'N', 'W', 'E'],
                         msg='full python xpath features should be supported')
        self.assertEqual(traverse_obj(etree, (0, '@name')), 'Liechtenstein',
                         msg='special transformations should act on current element')
        self.assertEqual(traverse_obj(etree, ('country', 0, Ellipsis, 'text()', T(int_or_none))), [1, 2008, 141100],
                         msg='special transformations should act on current element')

    def test_traversal_unbranching(self):
        self.assertEqual(traverse_obj(_TEST_DATA, [(100, 1.2), all]), [100, 1.2],
                         msg='`all` should give all results as list')
        self.assertEqual(traverse_obj(_TEST_DATA, [(100, 1.2), any]), 100,
                         msg='`any` should give the first result')
        self.assertEqual(traverse_obj(_TEST_DATA, [100, all]), [100],
                         msg='`all` should give list if non branching')
        self.assertEqual(traverse_obj(_TEST_DATA, [100, any]), 100,
                         msg='`any` should give single item if non branching')
        self.assertEqual(traverse_obj(_TEST_DATA, [('dict', 'None', 100), all]), [100],
                         msg='`all` should filter `None` and empty dict')
        self.assertEqual(traverse_obj(_TEST_DATA, [('dict', 'None', 100), any]), 100,
                         msg='`any` should filter `None` and empty dict')
        self.assertEqual(traverse_obj(_TEST_DATA, [{
            'all': [('dict', 'None', 100, 1.2), all],
            'any': [('dict', 'None', 100, 1.2), any],
        }]), {'all': [100, 1.2], 'any': 100},
            msg='`all`/`any` should apply to each dict path separately')
        self.assertEqual(traverse_obj(_TEST_DATA, [{
            'all': [('dict', 'None', 100, 1.2), all],
            'any': [('dict', 'None', 100, 1.2), any],
        }], get_all=False), {'all': [100, 1.2], 'any': 100},
            msg='`all`/`any` should apply to dict regardless of `get_all`')
        self.assertIs(traverse_obj(_TEST_DATA, [('dict', 'None', 100, 1.2), all, T(float)]), None,
                      msg='`all` should reset branching status')
        self.assertIs(traverse_obj(_TEST_DATA, [('dict', 'None', 100, 1.2), any, T(float)]), None,
                      msg='`any` should reset branching status')
        self.assertEqual(traverse_obj(_TEST_DATA, [('dict', 'None', 100, 1.2), all, Ellipsis, T(float)]), [1.2],
                         msg='`all` should allow further branching')
        self.assertEqual(traverse_obj(_TEST_DATA, [('dict', 'None', 'urls', 'data'), any, Ellipsis, 'index']), [0, 1],
                         msg='`any` should allow further branching')

    def test_traversal_morsel(self):
        values = {
            'expires': 'a',
            'path': 'b',
            'comment': 'c',
            'domain': 'd',
            'max-age': 'e',
            'secure': 'f',
            'httponly': 'g',
            'version': 'h',
            'samesite': 'i',
        }
        # SameSite added in Py3.8, breaks .update for 3.5-3.7
        if sys.version_info < (3, 8):
            del values['samesite']
        morsel = compat_http_cookies.Morsel()
        morsel.set(str('item_key'), 'item_value', 'coded_value')
        morsel.update(values)
        values['key'] = str('item_key')
        values['value'] = 'item_value'
        values = dict((str(k), v) for k, v in values.items())
        # make test pass even without ordered dict
        value_set = set(values.values())

        for key, value in values.items():
            self.assertEqual(traverse_obj(morsel, key), value,
                             msg='Morsel should provide access to all values')
        self.assertEqual(set(traverse_obj(morsel, Ellipsis)), value_set,
                         msg='`...` should yield all values')
        self.assertEqual(set(traverse_obj(morsel, lambda k, v: True)), value_set,
                         msg='function key should yield all values')
        self.assertIs(traverse_obj(morsel, [(None,), any]), morsel,
                      msg='Morsel should not be implicitly changed to dict on usage')

    def test_get_first(self):
        self.assertEqual(get_first([{'a': None}, {'a': 'spam'}], 'a'), 'spam')

    def test_dict_get(self):
        FALSE_VALUES = {
            'none': None,
            'false': False,
            'zero': 0,
            'empty_string': '',
            'empty_list': [],
        }
        d = FALSE_VALUES.copy()
        d['a'] = 42
        self.assertEqual(dict_get(d, 'a'), 42)
        self.assertEqual(dict_get(d, 'b'), None)
        self.assertEqual(dict_get(d, 'b', 42), 42)
        self.assertEqual(dict_get(d, ('a', )), 42)
        self.assertEqual(dict_get(d, ('b', 'a', )), 42)
        self.assertEqual(dict_get(d, ('b', 'c', 'a', 'd', )), 42)
        self.assertEqual(dict_get(d, ('b', 'c', )), None)
        self.assertEqual(dict_get(d, ('b', 'c', ), 42), 42)
        for key, false_value in FALSE_VALUES.items():
            self.assertEqual(dict_get(d, ('b', 'c', key, )), None)
            self.assertEqual(dict_get(d, ('b', 'c', key, ), skip_false_values=False), false_value)


if __name__ == '__main__':
    unittest.main()

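The test module above exercises `traverse_obj` exhaustively; in extractor code the typical pattern is much smaller. The following sketch uses the same API on a made-up dictionary and is not taken from the repository:

    # illustration only
    from youtube_dl.traversal import T, traverse_obj

    info = {'items': [{'id': '42', 'meta': {'title': 'spam'}}]}

    # dive into nested data without intermediate None checks
    title = traverse_obj(info, ('items', 0, 'meta', 'title'))    # 'spam'
    # branch over all items and coerce values with a transformation
    ids = traverse_obj(info, ('items', Ellipsis, 'id', T(int)))  # [42]
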
@@ -1,26 +0,0 @@
#!/usr/bin/env python
from __future__ import unicode_literals

# Allow direct execution
import os
import sys
import unittest
sys.path.insert(0, os.path.dirname(os.path.dirname(os.path.abspath(__file__))))


from youtube_dl.extractor import YoutubeIE


class TestYoutubeMisc(unittest.TestCase):
    def test_youtube_extract(self):
        assertExtractId = lambda url, id: self.assertEqual(YoutubeIE.extract_id(url), id)
        assertExtractId('http://www.youtube.com/watch?&v=BaW_jenozKc', 'BaW_jenozKc')
        assertExtractId('https://www.youtube.com/watch?&v=BaW_jenozKc', 'BaW_jenozKc')
        assertExtractId('https://www.youtube.com/watch?feature=player_embedded&v=BaW_jenozKc', 'BaW_jenozKc')
        assertExtractId('https://www.youtube.com/watch_popup?v=BaW_jenozKc', 'BaW_jenozKc')
        assertExtractId('http://www.youtube.com/watch?v=BaW_jenozKcsharePLED17F32AD9753930', 'BaW_jenozKc')
        assertExtractId('BaW_jenozKc', 'BaW_jenozKc')


if __name__ == '__main__':
    unittest.main()

@@ -1,35 +0,0 @@
<?xml version="1.0"?>
<!-- MPD file Generated with GPAC version 1.0.1-revrelease at 2021-11-27T20:53:11.690Z -->
<MPD xmlns="urn:mpeg:dash:schema:mpd:2011" minBufferTime="PT1.500S" type="static" mediaPresentationDuration="PT0H0M30.196S" maxSegmentDuration="PT0H0M10.027S" profiles="urn:mpeg:dash:profile:full:2011">
 <ProgramInformation moreInformationURL="http://gpac.io">
  <Title>manifest.mpd generated by GPAC</Title>
 </ProgramInformation>

 <Period duration="PT0H0M30.196S">
  <AdaptationSet segmentAlignment="true" maxWidth="768" maxHeight="432" maxFrameRate="30000/1001" par="16:9" lang="und" startWithSAP="1">
   <Representation id="1" mimeType="video/mp4" codecs="avc1.4D401E" width="768" height="432" frameRate="30000/1001" sar="1:1" bandwidth="526987">
    <BaseURL>video_dashinit.mp4</BaseURL>
    <SegmentList timescale="90000" duration="900000">
     <Initialization range="0-881"/>
     <SegmentURL mediaRange="882-876094" indexRange="882-925"/>
     <SegmentURL mediaRange="876095-1466732" indexRange="876095-876138"/>
     <SegmentURL mediaRange="1466733-1953615" indexRange="1466733-1466776"/>
     <SegmentURL mediaRange="1953616-1994211" indexRange="1953616-1953659"/>
    </SegmentList>
   </Representation>
  </AdaptationSet>
  <AdaptationSet segmentAlignment="true" lang="und" startWithSAP="1">
   <Representation id="2" mimeType="audio/mp4" codecs="mp4a.40.2" audioSamplingRate="48000" bandwidth="98096">
    <AudioChannelConfiguration schemeIdUri="urn:mpeg:dash:23003:3:audio_channel_configuration:2011" value="2"/>
    <BaseURL>audio_dashinit.mp4</BaseURL>
    <SegmentList timescale="48000" duration="480000">
     <Initialization range="0-752"/>
     <SegmentURL mediaRange="753-124129" indexRange="753-796"/>
     <SegmentURL mediaRange="124130-250544" indexRange="124130-124173"/>
     <SegmentURL mediaRange="250545-374929" indexRange="250545-250588"/>
    </SegmentList>
   </Representation>
  </AdaptationSet>
 </Period>
</MPD>

@ -1,351 +0,0 @@
|
|||||||
<?xml version="1.0" encoding="utf-8"?>
|
|
||||||
<!-- Created with Unified Streaming Platform (version=1.10.18-20255) -->
|
|
||||||
<MPD
|
|
||||||
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
|
|
||||||
xmlns="urn:mpeg:dash:schema:mpd:2011"
|
|
||||||
xsi:schemaLocation="urn:mpeg:dash:schema:mpd:2011 http://standards.iso.org/ittf/PubliclyAvailableStandards/MPEG-DASH_schema_files/DASH-MPD.xsd"
|
|
||||||
type="static"
|
|
||||||
mediaPresentationDuration="PT14M48S"
|
|
||||||
maxSegmentDuration="PT1M"
|
|
||||||
minBufferTime="PT10S"
|
|
||||||
profiles="urn:mpeg:dash:profile:isoff-live:2011">
|
|
||||||
<Period
|
|
||||||
id="1"
|
|
||||||
duration="PT14M48S">
|
|
||||||
<BaseURL>dash/</BaseURL>
|
|
||||||
<AdaptationSet
|
|
||||||
id="1"
|
|
||||||
group="1"
|
|
||||||
contentType="audio"
|
|
||||||
segmentAlignment="true"
|
|
||||||
audioSamplingRate="48000"
|
|
||||||
mimeType="audio/mp4"
|
|
||||||
codecs="mp4a.40.2"
|
|
||||||
startWithSAP="1">
|
|
||||||
<AudioChannelConfiguration
|
|
||||||
schemeIdUri="urn:mpeg:dash:23003:3:audio_channel_configuration:2011"
|
|
||||||
value="2" />
|
|
||||||
<Role schemeIdUri="urn:mpeg:dash:role:2011" value="main" />
|
|
||||||
<SegmentTemplate
|
|
||||||
timescale="48000"
|
|
||||||
initialization="3144-kZT4LWMQw6Rh7Kpd-$RepresentationID$.dash"
|
|
||||||
media="3144-kZT4LWMQw6Rh7Kpd-$RepresentationID$-$Time$.dash">
|
|
||||||
<SegmentTimeline>
|
|
||||||
<S t="0" d="96256" r="2" />
|
|
||||||
<S d="95232" />
|
|
||||||
<S d="96256" r="2" />
|
|
||||||
<S d="95232" />
|
|
||||||
<S d="96256" r="2" />
|
|
||||||
<S d="95232" />
|
|
||||||
<S d="96256" r="2" />
|
|
||||||
<S d="95232" />
|
|
||||||
<S d="96256" r="2" />
|
|
||||||
<S d="95232" />
|
|
||||||
<S d="96256" r="2" />
|
|
||||||
<S d="95232" />
|
|
||||||
<S d="96256" r="2" />
|
|
||||||
<S d="95232" />
|
|
||||||
<S d="96256" r="2" />
|
|
||||||
<S d="95232" />
|
|
||||||
<S d="96256" r="2" />
|
|
||||||
<S d="95232" />
|
|
||||||
<S d="96256" r="2" />
|
|
||||||
<S d="95232" />
|
|
||||||
<S d="96256" r="2" />
|
|
||||||
<S d="95232" />
|
|
||||||
<S d="96256" r="2" />
|
|
||||||
<S d="95232" />
|
|
||||||
<S d="96256" r="2" />
|
|
||||||
<S d="95232" />
|
|
||||||
<S d="96256" r="2" />
|
|
||||||
<S d="95232" />
|
|
||||||
<S d="96256" r="2" />
|
|
||||||
<S d="95232" />
|
|
||||||
<S d="96256" r="2" />
|
|
||||||
<S d="95232" />
|
|
||||||
<S d="96256" r="2" />
|
|
||||||
<S d="95232" />
|
|
||||||
<S d="96256" r="2" />
|
|
||||||
<S d="95232" />
|
|
||||||
<S d="96256" r="2" />
|
|
||||||
<S d="95232" />
|
|
||||||
<S d="96256" r="2" />
|
|
||||||
<S d="95232" />
|
|
||||||
<S d="96256" r="2" />
|
|
||||||
<S d="95232" />
|
|
||||||
<S d="96256" r="2" />
|
|
||||||
<S d="95232" />
|
|
||||||
<S d="96256" r="2" />
|
|
||||||
<S d="95232" />
|
|
||||||
<S d="96256" r="2" />
|
|
||||||
<S d="95232" />
|
|
||||||
<S d="96256" r="2" />
|
|
||||||
<S d="95232" />
|
|
||||||
<S d="96256" r="2" />
|
|
||||||
<S d="95232" />
|
|
||||||
<S d="96256" r="2" />
|
|
||||||
<S d="95232" />
|
|
||||||
<S d="96256" r="2" />
|
|
||||||
<S d="95232" />
|
|
||||||
<S d="96256" r="2" />
|
|
||||||
<S d="95232" />
|
|
||||||
<S d="96256" r="2" />
|
|
||||||
<S d="95232" />
|
|
||||||
<S d="96256" r="2" />
|
|
||||||
<S d="95232" />
|
|
||||||
<S d="96256" r="2" />
|
|
||||||
<S d="95232" />
|
|
||||||
<S d="96256" r="2" />
|
|
||||||
<S d="95232" />
|
|
||||||
<S d="96256" r="2" />
|
|
||||||
<S d="95232" />
|
|
||||||
<S d="96256" r="2" />
|
|
||||||
<S d="95232" />
|
|
||||||
<S d="96256" r="2" />
|
|
||||||
<S d="95232" />
|
|
||||||
<S d="96256" r="2" />
|
|
||||||
<S d="95232" />
|
|
||||||
<S d="96256" r="2" />
|
|
||||||
<S d="95232" />
|
|
||||||
<S d="96256" r="2" />
|
|
||||||
<S d="95232" />
|
|
||||||
<S d="96256" r="2" />
|
|
||||||
<S d="95232" />
|
|
||||||
<S d="96256" r="2" />
|
|
||||||
<S d="95232" />
|
|
||||||
<S d="96256" r="2" />
|
|
||||||
<S d="95232" />
|
|
||||||
<S d="96256" r="2" />
|
|
||||||
<S d="95232" />
|
|
||||||
<S d="96256" r="2" />
|
|
||||||
<S d="95232" />
|
|
||||||
<S d="96256" r="2" />
|
|
||||||
<S d="95232" />
|
|
||||||
<S d="96256" r="2" />
|
|
||||||
<S d="95232" />
|
|
||||||
<S d="96256" r="2" />
|
|
||||||
<S d="95232" />
|
|
||||||
<S d="96256" r="2" />
|
|
||||||
<S d="95232" />
|
|
||||||
<S d="96256" r="2" />
|
|
||||||
<S d="95232" />
|
|
||||||
<S d="96256" r="2" />
|
|
||||||
<S d="95232" />
|
|
||||||
<S d="96256" r="2" />
|
|
||||||
<S d="95232" />
|
|
||||||
<S d="96256" r="2" />
|
|
||||||
<S d="95232" />
|
|
||||||
<S d="96256" r="2" />
|
|
||||||
<S d="95232" />
|
|
||||||
<S d="96256" r="2" />
|
|
||||||
<S d="95232" />
|
|
||||||
<S d="96256" r="2" />
|
|
||||||
<S d="95232" />
|
|
||||||
<S d="96256" r="2" />
|
|
||||||
<S d="95232" />
|
|
||||||
<S d="96256" r="2" />
|
|
||||||
<S d="95232" />
|
|
          <S d="96256" r="2" />
          <S d="95232" />
          <S d="96256" r="2" />
          <S d="95232" />
          <S d="96256" r="2" />
          <S d="95232" />
          <S d="96256" r="2" />
          <S d="95232" />
          <S d="96256" r="2" />
          <S d="95232" />
          <S d="96256" r="2" />
          <S d="95232" />
          <S d="96256" r="2" />
          <S d="95232" />
          <S d="96256" r="2" />
          <S d="95232" />
          <S d="96256" r="2" />
          <S d="95232" />
          <S d="96256" r="2" />
          <S d="95232" />
          <S d="96256" r="2" />
          <S d="95232" />
          <S d="96256" r="2" />
          <S d="95232" />
          <S d="96256" r="2" />
          <S d="95232" />
          <S d="96256" r="2" />
          <S d="95232" />
          <S d="96256" r="2" />
          <S d="95232" />
          <S d="96256" r="2" />
          <S d="95232" />
          <S d="96256" r="2" />
          <S d="95232" />
          <S d="96256" r="2" />
          <S d="95232" />
          <S d="96256" r="2" />
          <S d="95232" />
          <S d="96256" r="2" />
          <S d="95232" />
          <S d="96256" r="2" />
          <S d="95232" />
          <S d="96256" r="2" />
          <S d="95232" />
          <S d="96256" r="2" />
          <S d="95232" />
          <S d="96256" r="2" />
          <S d="95232" />
          <S d="96256" r="2" />
          <S d="95232" />
          <S d="96256" r="2" />
          <S d="95232" />
          <S d="96256" r="2" />
          <S d="95232" />
          <S d="96256" r="2" />
          <S d="95232" />
          <S d="96256" r="2" />
          <S d="95232" />
          <S d="96256" r="2" />
          <S d="95232" />
          <S d="96256" r="2" />
          <S d="95232" />
          <S d="96256" r="2" />
          <S d="95232" />
          <S d="96256" r="2" />
          <S d="95232" />
          <S d="96256" r="2" />
          <S d="95232" />
          <S d="96256" r="2" />
          <S d="95232" />
          <S d="96256" r="2" />
          <S d="95232" />
          <S d="96256" r="2" />
          <S d="95232" />
          <S d="96256" r="2" />
          <S d="95232" />
          <S d="96256" r="2" />
          <S d="95232" />
          <S d="96256" r="2" />
          <S d="95232" />
          <S d="96256" r="2" />
          <S d="95232" />
          <S d="96256" r="2" />
          <S d="95232" />
          <S d="96256" r="2" />
          <S d="95232" />
          <S d="96256" r="2" />
          <S d="95232" />
          <S d="96256" r="2" />
          <S d="95232" />
          <S d="96256" r="2" />
          <S d="95232" />
          <S d="96256" r="2" />
          <S d="95232" />
          <S d="96256" r="2" />
          <S d="95232" />
          <S d="96256" r="2" />
          <S d="95232" />
          <S d="96256" r="2" />
          <S d="95232" />
          <S d="96256" r="2" />
          <S d="95232" />
          <S d="96256" r="2" />
          <S d="95232" />
          <S d="96256" r="2" />
          <S d="95232" />
          <S d="3584" />
        </SegmentTimeline>
      </SegmentTemplate>
      <Representation
        id="audio=128001"
        bandwidth="128001">
      </Representation>
    </AdaptationSet>
    <AdaptationSet
      id="2"
      group="3"
      contentType="text"
      lang="en"
      mimeType="application/mp4"
      codecs="stpp"
      startWithSAP="1">
      <Role schemeIdUri="urn:mpeg:dash:role:2011" value="subtitle" />
      <SegmentTemplate
        timescale="1000"
        initialization="3144-kZT4LWMQw6Rh7Kpd-$RepresentationID$.dash"
        media="3144-kZT4LWMQw6Rh7Kpd-$RepresentationID$-$Time$.dash">
        <SegmentTimeline>
          <S t="0" d="60000" r="9" />
          <S d="24000" />
        </SegmentTimeline>
      </SegmentTemplate>
      <Representation
        id="textstream_eng=1000"
        bandwidth="1000">
      </Representation>
    </AdaptationSet>
    <AdaptationSet
      id="3"
      group="2"
      contentType="video"
      par="960:409"
      minBandwidth="100000"
      maxBandwidth="4482000"
      maxWidth="1689"
      maxHeight="720"
      segmentAlignment="true"
      mimeType="video/mp4"
      codecs="avc1.4D401F"
      startWithSAP="1">
      <Role schemeIdUri="urn:mpeg:dash:role:2011" value="main" />
      <SegmentTemplate
        timescale="12288"
        initialization="3144-kZT4LWMQw6Rh7Kpd-$RepresentationID$.dash"
        media="3144-kZT4LWMQw6Rh7Kpd-$RepresentationID$-$Time$.dash">
        <SegmentTimeline>
          <S t="0" d="24576" r="443" />
        </SegmentTimeline>
      </SegmentTemplate>
      <Representation
        id="video=100000"
        bandwidth="100000"
        width="336"
        height="144"
        sar="2880:2863"
        scanType="progressive">
      </Representation>
      <Representation
        id="video=326000"
        bandwidth="326000"
        width="562"
        height="240"
        sar="115200:114929"
        scanType="progressive">
      </Representation>
      <Representation
        id="video=698000"
        bandwidth="698000"
        width="844"
        height="360"
        sar="86400:86299"
        scanType="progressive">
      </Representation>
      <Representation
        id="video=1493000"
        bandwidth="1493000"
        width="1126"
        height="480"
        sar="230400:230267"
        scanType="progressive">
      </Representation>
      <Representation
        id="video=4482000"
        bandwidth="4482000"
        width="1688"
        height="720"
        sar="86400:86299"
        scanType="progressive">
      </Representation>
    </AdaptationSet>
  </Period>
</MPD>
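The video SegmentTimeline above is a single S element with t="0", d="24576" and r="443" on a timescale of 12288, i.e. 444 segments of exactly two seconds. A minimal arithmetic sketch (illustrative only, not part of the diff):

timescale = 12288           # from the video SegmentTemplate above
d, r = 24576, 443           # from <S t="0" d="24576" r="443" />
segments = r + 1            # r counts extra repetitions of the same duration
print(segments, segments * d / float(timescale))   # 444 segments, 888.0 seconds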
@ -1,32 +0,0 @@
<?xml version="1.0" ?>
<MPD xmlns="urn:mpeg:dash:schema:mpd:2011" profiles="urn:mpeg:dash:profile:isoff-live:2011" minBufferTime="PT10.01S" mediaPresentationDuration="PT30.097S" type="static">
  <!-- Created with Bento4 mp4-dash.py, VERSION=2.0.0-639 -->
  <Period>
    <!-- Video -->
    <AdaptationSet mimeType="video/mp4" segmentAlignment="true" startWithSAP="1" maxWidth="768" maxHeight="432">
      <Representation id="video-avc1" codecs="avc1.4D401E" width="768" height="432" scanType="progressive" frameRate="30000/1001" bandwidth="699597">
        <SegmentList timescale="1000" duration="10010">
          <Initialization sourceURL="video-frag.mp4" range="36-746"/>
          <SegmentURL media="video-frag.mp4" mediaRange="747-876117"/>
          <SegmentURL media="video-frag.mp4" mediaRange="876118-1466913"/>
          <SegmentURL media="video-frag.mp4" mediaRange="1466914-1953954"/>
          <SegmentURL media="video-frag.mp4" mediaRange="1953955-1994652"/>
        </SegmentList>
      </Representation>
    </AdaptationSet>
    <!-- Audio -->
    <AdaptationSet mimeType="audio/mp4" startWithSAP="1" segmentAlignment="true">
      <Representation id="audio-und-mp4a.40.2" codecs="mp4a.40.2" bandwidth="98808" audioSamplingRate="48000">
        <AudioChannelConfiguration schemeIdUri="urn:mpeg:mpegB:cicp:ChannelConfiguration" value="2"/>
        <SegmentList timescale="1000" duration="10010">
          <Initialization sourceURL="audio-frag.mp4" range="32-623"/>
          <SegmentURL media="audio-frag.mp4" mediaRange="624-124199"/>
          <SegmentURL media="audio-frag.mp4" mediaRange="124200-250303"/>
          <SegmentURL media="audio-frag.mp4" mediaRange="250304-374365"/>
          <SegmentURL media="audio-frag.mp4" mediaRange="374366-374836"/>
        </SegmentList>
      </Representation>
    </AdaptationSet>
  </Period>
</MPD>
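In the SegmentList manifest above each segment is a byte range within one file (video-frag.mp4 / audio-frag.mp4), so a client turns mediaRange into an HTTP Range request. A minimal sketch, assuming the fixture files were served over HTTP (the base URL is a placeholder):

from urllib.request import Request, urlopen

def fetch_segment(base_url, media, media_range):
    # mediaRange="747-876117" maps directly onto a Range header
    req = Request(base_url + media, headers={'Range': 'bytes=' + media_range})
    return urlopen(req).read()

# first video segment from the manifest above:
# fetch_segment('http://example.com/', 'video-frag.mp4', '747-876117')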
File diff suppressed because it is too large
@ -1,66 +0,0 @@
# coding: utf-8
from __future__ import unicode_literals

try:
    import threading
except ImportError:
    threading = None

from .common import FileDownloader
from ..downloader import get_suitable_downloader
from ..extractor.niconico import NiconicoIE
from ..utils import sanitized_Request


class NiconicoDmcFD(FileDownloader):
    """ Downloading niconico douga from DMC with heartbeat """

    FD_NAME = 'niconico_dmc'

    def real_download(self, filename, info_dict):
        self.to_screen('[%s] Downloading from DMC' % self.FD_NAME)

        ie = NiconicoIE(self.ydl)
        info_dict, heartbeat_info_dict = ie._get_heartbeat_info(info_dict)

        fd = get_suitable_downloader(info_dict, params=self.params)(self.ydl, self.params)
        for ph in self._progress_hooks:
            fd.add_progress_hook(ph)

        if not threading:
            self.to_screen('[%s] Threading for Heartbeat not available' % self.FD_NAME)
            return fd.real_download(filename, info_dict)

        success = download_complete = False
        timer = [None]
        heartbeat_lock = threading.Lock()
        heartbeat_url = heartbeat_info_dict['url']
        heartbeat_data = heartbeat_info_dict['data'].encode()
        heartbeat_interval = heartbeat_info_dict.get('interval', 30)

        request = sanitized_Request(heartbeat_url, heartbeat_data)

        def heartbeat():
            try:
                self.ydl.urlopen(request).read()
            except Exception:
                self.to_screen('[%s] Heartbeat failed' % self.FD_NAME)

            with heartbeat_lock:
                if not download_complete:
                    timer[0] = threading.Timer(heartbeat_interval, heartbeat)
                    timer[0].start()

        heartbeat_info_dict['ping']()
        self.to_screen('[%s] Heartbeat with %d second interval ...' % (self.FD_NAME, heartbeat_interval))
        try:
            heartbeat()
            if type(fd).__name__ == 'HlsFD':
                info_dict.update(ie._extract_m3u8_formats(info_dict['url'], info_dict['id'])[0])
            success = fd.real_download(filename, info_dict)
        finally:
            if heartbeat_lock:
                with heartbeat_lock:
                    timer[0].cancel()
                    download_complete = True
        return success
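The downloader above keeps the DMC session alive by re-arming a threading.Timer after every ping until the real download finishes. A standalone sketch of the same pattern (illustrative; the ping callable and interval are placeholders, not part of the changed file):

import threading

def keep_alive(ping, interval=30):
    lock = threading.Lock()
    timer = [None]
    stopped = [False]

    def beat():
        ping()
        with lock:
            if not stopped[0]:
                timer[0] = threading.Timer(interval, beat)
                timer[0].start()

    def stop():
        with lock:
            stopped[0] = True
            if timer[0]:
                timer[0].cancel()

    beat()
    return stop   # call stop() once the download completes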
@ -1,89 +0,0 @@
# coding: utf-8
from __future__ import unicode_literals

from .common import InfoExtractor
from ..utils import (
    clean_html,
    dict_get,
    get_element_by_class,
    int_or_none,
    unified_strdate,
    url_or_none,
)


class Alsace20TVIE(InfoExtractor):
    _VALID_URL = r'https?://(?:www\.)?alsace20\.tv/(?:[\w-]+/)+[\w-]+-(?P<id>[\w]+)'
    _TESTS = [{
        'url': 'https://www.alsace20.tv/VOD/Actu/JT/Votre-JT-jeudi-3-fevrier-lyNHCXpYJh.html',
        # 'md5': 'd91851bf9af73c0ad9b2cdf76c127fbb',
        'info_dict': {
            'id': 'lyNHCXpYJh',
            'ext': 'mp4',
            'description': 'md5:fc0bc4a0692d3d2dba4524053de4c7b7',
            'title': 'Votre JT du jeudi 3 février',
            'upload_date': '20220203',
            'thumbnail': r're:https?://.+\.jpg',
            'duration': 1073,
            'view_count': int,
        },
        'params': {
            'format': 'bestvideo',
        },
    }]

    def _extract_video(self, video_id, url=None):
        info = self._download_json(
            'https://www.alsace20.tv/visionneuse/visio_v9_js.php?key=%s&habillage=0&mode=html' % (video_id, ),
            video_id) or {}
        title = info['titre']

        formats = []
        for res, fmt_url in (info.get('files') or {}).items():
            formats.extend(
                self._extract_smil_formats(fmt_url, video_id, fatal=False)
                if '/smil:_' in fmt_url
                else self._extract_mpd_formats(fmt_url, video_id, mpd_id=res, fatal=False))
        self._sort_formats(formats)

        webpage = (url and self._download_webpage(url, video_id, fatal=False)) or ''
        thumbnail = url_or_none(dict_get(info, ('image', 'preview', )) or self._og_search_thumbnail(webpage))
        upload_date = self._search_regex(r'/(\d{6})_', thumbnail, 'upload_date', default=None)
        upload_date = unified_strdate('20%s-%s-%s' % (upload_date[:2], upload_date[2:4], upload_date[4:])) if upload_date else None
        return {
            'id': video_id,
            'title': title,
            'formats': formats,
            'description': clean_html(get_element_by_class('wysiwyg', webpage)),
            'upload_date': upload_date,
            'thumbnail': thumbnail,
            'duration': int_or_none(self._og_search_property('video:duration', webpage) if webpage else None),
            'view_count': int_or_none(info.get('nb_vues')),
        }

    def _real_extract(self, url):
        video_id = self._match_id(url)
        return self._extract_video(video_id, url)


class Alsace20TVEmbedIE(Alsace20TVIE):
    _VALID_URL = r'https?://(?:www\.)?alsace20\.tv/emb/(?P<id>[\w]+)'
    _TESTS = [{
        'url': 'https://www.alsace20.tv/emb/lyNHCXpYJh',
        # 'md5': 'd91851bf9af73c0ad9b2cdf76c127fbb',
        'info_dict': {
            'id': 'lyNHCXpYJh',
            'ext': 'mp4',
            'title': 'Votre JT du jeudi 3 février',
            'upload_date': '20220203',
            'thumbnail': r're:https?://.+\.jpg',
            'view_count': int,
        },
        'params': {
            'format': 'bestvideo',
        },
    }]

    def _real_extract(self, url):
        video_id = self._match_id(url)
        return self._extract_video(video_id)
@ -1,101 +0,0 @@
# coding: utf-8
from __future__ import unicode_literals

from .common import InfoExtractor
from ..compat import (
    compat_parse_qs,
    compat_urllib_parse_urlparse,
)
from ..utils import (
    float_or_none,
    int_or_none,
    parse_iso8601,
    remove_start,
)


class ArnesIE(InfoExtractor):
    IE_NAME = 'video.arnes.si'
    IE_DESC = 'Arnes Video'
    _VALID_URL = r'https?://video\.arnes\.si/(?:[a-z]{2}/)?(?:watch|embed|api/(?:asset|public/video))/(?P<id>[0-9a-zA-Z]{12})'
    _TESTS = [{
        'url': 'https://video.arnes.si/watch/a1qrWTOQfVoU?t=10',
        'md5': '4d0f4d0a03571b33e1efac25fd4a065d',
        'info_dict': {
            'id': 'a1qrWTOQfVoU',
            'ext': 'mp4',
            'title': 'Linearna neodvisnost, definicija',
            'description': 'Linearna neodvisnost, definicija',
            'license': 'PRIVATE',
            'creator': 'Polona Oblak',
            'timestamp': 1585063725,
            'upload_date': '20200324',
            'channel': 'Polona Oblak',
            'channel_id': 'q6pc04hw24cj',
            'channel_url': 'https://video.arnes.si/?channel=q6pc04hw24cj',
            'duration': 596.75,
            'view_count': int,
            'tags': ['linearna_algebra'],
            'start_time': 10,
        }
    }, {
        'url': 'https://video.arnes.si/api/asset/s1YjnV7hadlC/play.mp4',
        'only_matching': True,
    }, {
        'url': 'https://video.arnes.si/embed/s1YjnV7hadlC',
        'only_matching': True,
    }, {
        'url': 'https://video.arnes.si/en/watch/s1YjnV7hadlC',
        'only_matching': True,
    }, {
        'url': 'https://video.arnes.si/embed/s1YjnV7hadlC?t=123&hideRelated=1',
        'only_matching': True,
    }, {
        'url': 'https://video.arnes.si/api/public/video/s1YjnV7hadlC',
        'only_matching': True,
    }]
    _BASE_URL = 'https://video.arnes.si'

    def _real_extract(self, url):
        video_id = self._match_id(url)

        video = self._download_json(
            self._BASE_URL + '/api/public/video/' + video_id, video_id)['data']
        title = video['title']

        formats = []
        for media in (video.get('media') or []):
            media_url = media.get('url')
            if not media_url:
                continue
            formats.append({
                'url': self._BASE_URL + media_url,
                'format_id': remove_start(media.get('format'), 'FORMAT_'),
                'format_note': media.get('formatTranslation'),
                'width': int_or_none(media.get('width')),
                'height': int_or_none(media.get('height')),
            })
        self._sort_formats(formats)

        channel = video.get('channel') or {}
        channel_id = channel.get('url')
        thumbnail = video.get('thumbnailUrl')

        return {
            'id': video_id,
            'title': title,
            'formats': formats,
            'thumbnail': self._BASE_URL + thumbnail,
            'description': video.get('description'),
            'license': video.get('license'),
            'creator': video.get('author'),
            'timestamp': parse_iso8601(video.get('creationTime')),
            'channel': channel.get('name'),
            'channel_id': channel_id,
            'channel_url': self._BASE_URL + '/?channel=' + channel_id if channel_id else None,
            'duration': float_or_none(video.get('duration'), 1000),
            'view_count': int_or_none(video.get('views')),
            'tags': video.get('hashtags'),
            'start_time': int_or_none(compat_parse_qs(
                compat_urllib_parse_urlparse(url).query).get('t', [None])[0]),
        }
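The start_time field above is read back from the ?t= query parameter of the watch URL (e.g. ?t=10 in the first test). A standalone Python 3 sketch of the same parsing (illustrative, not part of the changed file):

from urllib.parse import parse_qs, urlparse

def start_time_from_url(url):
    # first value of the 't' parameter, or None if absent or non-numeric
    t = parse_qs(urlparse(url).query).get('t', [None])[0]
    return int(t) if t and t.isdigit() else None

print(start_time_from_url('https://video.arnes.si/watch/a1qrWTOQfVoU?t=10'))  # 10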
@ -1,37 +0,0 @@
# coding: utf-8
from __future__ import unicode_literals

from .brightcove import BrightcoveNewIE
from ..utils import extract_attributes


class BandaiChannelIE(BrightcoveNewIE):
    IE_NAME = 'bandaichannel'
    _VALID_URL = r'https?://(?:www\.)?b-ch\.com/titles/(?P<id>\d+/\d+)'
    _TESTS = [{
        'url': 'https://www.b-ch.com/titles/514/001',
        'md5': 'a0f2d787baa5729bed71108257f613a4',
        'info_dict': {
            'id': '6128044564001',
            'ext': 'mp4',
            'title': 'メタルファイターMIKU 第1話',
            'timestamp': 1580354056,
            'uploader_id': '5797077852001',
            'upload_date': '20200130',
            'duration': 1387.733,
        },
        'params': {
            'format': 'bestvideo',
            'skip_download': True,
        },
    }]

    def _real_extract(self, url):
        video_id = self._match_id(url)
        webpage = self._download_webpage(url, video_id)
        attrs = extract_attributes(self._search_regex(
            r'(<video-js[^>]+\bid="bcplayer"[^>]*>)', webpage, 'player'))
        bc = self._download_json(
            'https://pbifcd.b-ch.com/v1/playbackinfo/ST/70/' + attrs['data-info'],
            video_id, headers={'X-API-KEY': attrs['data-auth'].strip()})['bc']
        return self._parse_brightcove_metadata(bc, bc['id'])
@ -1,59 +0,0 @@
# coding: utf-8
from __future__ import unicode_literals

from .common import InfoExtractor
from ..utils import ExtractorError, urlencode_postdata


class BigoIE(InfoExtractor):
    _VALID_URL = r'https?://(?:www\.)?bigo\.tv/(?:[a-z]{2,}/)?(?P<id>[^/]+)'

    _TESTS = [{
        'url': 'https://www.bigo.tv/ja/221338632',
        'info_dict': {
            'id': '6576287577575737440',
            'title': '土よ〜💁♂️ 休憩室/REST room',
            'thumbnail': r're:https?://.+',
            'uploader': '✨Shin💫',
            'uploader_id': '221338632',
            'is_live': True,
        },
        'skip': 'livestream',
    }, {
        'url': 'https://www.bigo.tv/th/Tarlerm1304',
        'only_matching': True,
    }, {
        'url': 'https://bigo.tv/115976881',
        'only_matching': True,
    }]

    def _real_extract(self, url):
        user_id = self._match_id(url)

        info_raw = self._download_json(
            'https://bigo.tv/studio/getInternalStudioInfo',
            user_id, data=urlencode_postdata({'siteId': user_id}))

        if not isinstance(info_raw, dict):
            raise ExtractorError('Received invalid JSON data')
        if info_raw.get('code'):
            raise ExtractorError(
                'Bigo says: %s (code %s)' % (info_raw.get('msg'), info_raw.get('code')), expected=True)
        info = info_raw.get('data') or {}

        if not info.get('alive'):
            raise ExtractorError('This user is offline.', expected=True)

        return {
            'id': info.get('roomId') or user_id,
            'title': info.get('roomTopic') or info.get('nick_name') or user_id,
            'formats': [{
                'url': info.get('hls_src'),
                'ext': 'mp4',
                'protocol': 'm3u8',
            }],
            'thumbnail': info.get('snapshot'),
            'uploader': info.get('nick_name'),
            'uploader_id': user_id,
            'is_live': True,
        }
@ -1,173 +0,0 @@
# coding: utf-8
from __future__ import unicode_literals

import json

from ..utils import (
    strip_or_none,
    traverse_obj,
)
from .common import InfoExtractor


class BlerpIE(InfoExtractor):
    IE_NAME = 'blerp'
    _VALID_URL = r'https?://(?:www\.)?blerp\.com/soundbites/(?P<id>[0-9a-zA-Z]+)'
    _TESTS = [{
        'url': 'https://blerp.com/soundbites/6320fe8745636cb4dd677a5a',
        'info_dict': {
            'id': '6320fe8745636cb4dd677a5a',
            'title': 'Samsung Galaxy S8 Over the Horizon Ringtone 2016',
            'uploader': 'luminousaj',
            'uploader_id': '5fb81e51aa66ae000c395478',
            'ext': 'mp3',
            'tags': ['samsung', 'galaxy', 's8', 'over the horizon', '2016', 'ringtone'],
        }
    }, {
        'url': 'https://blerp.com/soundbites/5bc94ef4796001000498429f',
        'info_dict': {
            'id': '5bc94ef4796001000498429f',
            'title': 'Yee',
            'uploader': '179617322678353920',
            'uploader_id': '5ba99cf71386730004552c42',
            'ext': 'mp3',
            'tags': ['YEE', 'YEET', 'wo ha haah catchy tune yee', 'yee']
        }
    }]

    _GRAPHQL_OPERATIONNAME = "webBitePageGetBite"
    _GRAPHQL_QUERY = (
        '''query webBitePageGetBite($_id: MongoID!) {
            web {
                biteById(_id: $_id) {
                    ...bitePageFrag
                    __typename
                }
                __typename
            }
        }

        fragment bitePageFrag on Bite {
            _id
            title
            userKeywords
            keywords
            color
            visibility
            isPremium
            owned
            price
            extraReview
            isAudioExists
            image {
                filename
                original {
                    url
                    __typename
                }
                __typename
            }
            userReactions {
                _id
                reactions
                createdAt
                __typename
            }
            topReactions
            totalSaveCount
            saved
            blerpLibraryType
            license
            licenseMetaData
            playCount
            totalShareCount
            totalFavoriteCount
            totalAddedToBoardCount
            userCategory
            userAudioQuality
            audioCreationState
            transcription
            userTranscription
            description
            createdAt
            updatedAt
            author
            listingType
            ownerObject {
                _id
                username
                profileImage {
                    filename
                    original {
                        url
                        __typename
                    }
                    __typename
                }
                __typename
            }
            transcription
            favorited
            visibility
            isCurated
            sourceUrl
            audienceRating
            strictAudienceRating
            ownerId
            reportObject {
                reportedContentStatus
                __typename
            }
            giphy {
                mp4
                gif
                __typename
            }
            audio {
                filename
                original {
                    url
                    __typename
                }
                mp3 {
                    url
                    __typename
                }
                __typename
            }
            __typename
        }

        ''')

    def _real_extract(self, url):
        audio_id = self._match_id(url)

        data = {
            'operationName': self._GRAPHQL_OPERATIONNAME,
            'query': self._GRAPHQL_QUERY,
            'variables': {
                '_id': audio_id
            }
        }

        headers = {
            'Content-Type': 'application/json'
        }

        json_result = self._download_json('https://api.blerp.com/graphql',
                                          audio_id, data=json.dumps(data).encode('utf-8'), headers=headers)

        bite_json = json_result['data']['web']['biteById']

        info_dict = {
            'id': bite_json['_id'],
            'url': bite_json['audio']['mp3']['url'],
            'title': bite_json['title'],
            'uploader': traverse_obj(bite_json, ('ownerObject', 'username'), expected_type=strip_or_none),
            'uploader_id': traverse_obj(bite_json, ('ownerObject', '_id'), expected_type=strip_or_none),
            'ext': 'mp3',
            'tags': list(filter(None, map(strip_or_none, (traverse_obj(bite_json, 'userKeywords', expected_type=list) or []))) or None)
        }

        return info_dict
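The extractor above posts the GraphQL document as JSON with operationName, query and variables keys. A standalone sketch of the same request shape using only the standard library (illustrative; it reuses the endpoint and field names shown above, everything else is a placeholder):

import json
from urllib.request import Request, urlopen

def fetch_bite(audio_id, graphql_query):
    payload = {
        'operationName': 'webBitePageGetBite',
        'query': graphql_query,          # the GraphQL document shown above
        'variables': {'_id': audio_id},
    }
    req = Request('https://api.blerp.com/graphql',
                  data=json.dumps(payload).encode('utf-8'),
                  headers={'Content-Type': 'application/json'})
    result = json.loads(urlopen(req).read().decode('utf-8'))
    return result['data']['web']['biteById']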
@ -0,0 +1,86 @@
from __future__ import unicode_literals

import json

from .common import InfoExtractor
from ..utils import (
    remove_start,
    int_or_none,
)


class BlinkxIE(InfoExtractor):
    _VALID_URL = r'(?:https?://(?:www\.)blinkx\.com/#?ce/|blinkx:)(?P<id>[^?]+)'
    IE_NAME = 'blinkx'

    _TEST = {
        'url': 'http://www.blinkx.com/ce/Da0Gw3xc5ucpNduzLuDDlv4WC9PuI4fDi1-t6Y3LyfdY2SZS5Urbvn-UPJvrvbo8LTKTc67Wu2rPKSQDJyZeeORCR8bYkhs8lI7eqddznH2ofh5WEEdjYXnoRtj7ByQwt7atMErmXIeYKPsSDuMAAqJDlQZ-3Ff4HJVeH_s3Gh8oQ',
        'md5': '337cf7a344663ec79bf93a526a2e06c7',
        'info_dict': {
            'id': 'Da0Gw3xc',
            'ext': 'mp4',
            'title': 'No Daily Show for John Oliver; HBO Show Renewed - IGN News',
            'uploader': 'IGN News',
            'upload_date': '20150217',
            'timestamp': 1424215740,
            'description': 'HBO has renewed Last Week Tonight With John Oliver for two more seasons.',
            'duration': 47.743333,
        },
    }

    def _real_extract(self, url):
        video_id = self._match_id(url)
        display_id = video_id[:8]

        api_url = ('https://apib4.blinkx.com/api.php?action=play_video&'
                   + 'video=%s' % video_id)
        data_json = self._download_webpage(api_url, display_id)
        data = json.loads(data_json)['api']['results'][0]
        duration = None
        thumbnails = []
        formats = []
        for m in data['media']:
            if m['type'] == 'jpg':
                thumbnails.append({
                    'url': m['link'],
                    'width': int(m['w']),
                    'height': int(m['h']),
                })
            elif m['type'] == 'original':
                duration = float(m['d'])
            elif m['type'] == 'youtube':
                yt_id = m['link']
                self.to_screen('Youtube video detected: %s' % yt_id)
                return self.url_result(yt_id, 'Youtube', video_id=yt_id)
            elif m['type'] in ('flv', 'mp4'):
                vcodec = remove_start(m['vcodec'], 'ff')
                acodec = remove_start(m['acodec'], 'ff')
                vbr = int_or_none(m.get('vbr') or m.get('vbitrate'), 1000)
                abr = int_or_none(m.get('abr') or m.get('abitrate'), 1000)
                tbr = vbr + abr if vbr and abr else None
                format_id = '%s-%sk-%s' % (vcodec, tbr, m['w'])
                formats.append({
                    'format_id': format_id,
                    'url': m['link'],
                    'vcodec': vcodec,
                    'acodec': acodec,
                    'abr': abr,
                    'vbr': vbr,
                    'tbr': tbr,
                    'width': int_or_none(m.get('w')),
                    'height': int_or_none(m.get('h')),
                })

        self._sort_formats(formats)

        return {
            'id': display_id,
            'fullid': video_id,
            'title': data['title'],
            'formats': formats,
            'uploader': data['channel_name'],
            'timestamp': data['pubdate_epoch'],
            'description': data.get('description'),
            'thumbnails': thumbnails,
            'duration': duration,
        }
@ -1,79 +0,0 @@
# coding: utf-8
from __future__ import unicode_literals

from .common import InfoExtractor
from ..utils import (
    determine_ext,
    int_or_none,
    merge_dicts,
    parse_iso8601,
    T,
    traverse_obj,
    txt_or_none,
    urljoin,
)


class CaffeineTVIE(InfoExtractor):
    _VALID_URL = r'https?://(?:www\.)?caffeine\.tv/[^/]+/video/(?P<id>[0-9a-f-]+)'
    _TESTS = [{
        'url': 'https://www.caffeine.tv/TsuSurf/video/cffc0a00-e73f-11ec-8080-80017d29f26e',
        'info_dict': {
            'id': 'cffc0a00-e73f-11ec-8080-80017d29f26e',
            'ext': 'mp4',
            'title': 'GOOOOD MORNINNNNN #highlights',
            'timestamp': 1654702180,
            'upload_date': '20220608',
            'uploader': 'TsuSurf',
            'duration': 3145,
            'age_limit': 17,
        },
        'params': {
            'format': 'bestvideo',
        },
    }]

    def _real_extract(self, url):
        video_id = self._match_id(url)
        json_data = self._download_json(
            'https://api.caffeine.tv/social/public/activity/' + video_id,
            video_id)
        broadcast_info = traverse_obj(json_data, ('broadcast_info', T(dict))) or {}
        title = broadcast_info['broadcast_title']
        video_url = broadcast_info['video_url']

        ext = determine_ext(video_url)
        if ext == 'm3u8':
            formats = self._extract_m3u8_formats(
                video_url, video_id, 'mp4', entry_protocol='m3u8',
                fatal=False)
        else:
            formats = [{'url': video_url}]
        self._sort_formats(formats)

        return merge_dicts({
            'id': video_id,
            'title': title,
            'formats': formats,
        }, traverse_obj(json_data, {
            'uploader': ((None, 'user'), 'username'),
        }, get_all=False), traverse_obj(json_data, {
            'like_count': ('like_count', T(int_or_none)),
            'view_count': ('view_count', T(int_or_none)),
            'comment_count': ('comment_count', T(int_or_none)),
            'tags': ('tags', Ellipsis, T(txt_or_none)),
            'is_live': 'is_live',
            'uploader': ('user', 'name'),
        }), traverse_obj(broadcast_info, {
            'duration': ('content_duration', T(int_or_none)),
            'timestamp': ('broadcast_start_time', T(parse_iso8601)),
            'thumbnail': ('preview_image_path', T(lambda u: urljoin(url, u))),
            'age_limit': ('content_rating', T(lambda r: r and {
                # assume Apple Store ratings [1]
                # 1. https://en.wikipedia.org/wiki/Mobile_software_content_rating_system
                'FOUR_PLUS': 0,
                'NINE_PLUS': 9,
                'TWELVE_PLUS': 12,
                'SEVENTEEN_PLUS': 17,
            }.get(r, 17))),
        }))
@ -1,113 +1,38 @@
-from __future__ import unicode_literals
-
-import re
-
-# from .cbs import CBSBaseIE
-from .common import InfoExtractor
-from ..utils import (
-    int_or_none,
-    try_get,
-)
-
-
-# class CBSSportsEmbedIE(CBSBaseIE):
-class CBSSportsEmbedIE(InfoExtractor):
-    IE_NAME = 'cbssports:embed'
-    _VALID_URL = r'''(?ix)https?://(?:(?:www\.)?cbs|embed\.247)sports\.com/player/embed.+?
-        (?:
-            ids%3D(?P<id>[\da-f]{8}-(?:[\da-f]{4}-){3}[\da-f]{12})|
-            pcid%3D(?P<pcid>\d+)
-        )'''
-    _TESTS = [{
-        'url': 'https://www.cbssports.com/player/embed/?args=player_id%3Db56c03a6-231a-4bbe-9c55-af3c8a8e9636%26ids%3Db56c03a6-231a-4bbe-9c55-af3c8a8e9636%26resizable%3D1%26autoplay%3Dtrue%26domain%3Dcbssports.com%26comp_ads_enabled%3Dfalse%26watchAndRead%3D0%26startTime%3D0%26env%3Dprod',
-        'only_matching': True,
-    }, {
-        'url': 'https://embed.247sports.com/player/embed/?args=%3fplayer_id%3d1827823171591%26channel%3dcollege-football-recruiting%26pcid%3d1827823171591%26width%3d640%26height%3d360%26autoplay%3dTrue%26comp_ads_enabled%3dFalse%26uvpc%3dhttps%253a%252f%252fwww.cbssports.com%252fapi%252fcontent%252fvideo%252fconfig%252f%253fcfg%253duvp_247sports_v4%2526partner%253d247%26uvpc_m%3dhttps%253a%252f%252fwww.cbssports.com%252fapi%252fcontent%252fvideo%252fconfig%252f%253fcfg%253duvp_247sports_m_v4%2526partner_m%253d247_mobile%26utag%3d247sportssite%26resizable%3dTrue',
-        'only_matching': True,
-    }]
-
-    # def _extract_video_info(self, filter_query, video_id):
-    #     return self._extract_feed_info('dJ5BDC', 'VxxJg8Ymh8sE', filter_query, video_id)
-
-    def _real_extract(self, url):
-        uuid, pcid = re.match(self._VALID_URL, url).groups()
-        query = {'id': uuid} if uuid else {'pcid': pcid}
-        video = self._download_json(
-            'https://www.cbssports.com/api/content/video/',
-            uuid or pcid, query=query)[0]
-        video_id = video['id']
-        title = video['title']
-        metadata = video.get('metaData') or {}
-        # return self._extract_video_info('byId=%d' % metadata['mpxOutletId'], video_id)
-        # return self._extract_video_info('byGuid=' + metadata['mpxRefId'], video_id)
-
-        formats = self._extract_m3u8_formats(
-            metadata['files'][0]['url'], video_id, 'mp4',
-            'm3u8_native', m3u8_id='hls', fatal=False)
-        self._sort_formats(formats)
-
-        image = video.get('image')
-        thumbnails = None
-        if image:
-            image_path = image.get('path')
-            if image_path:
-                thumbnails = [{
-                    'url': image_path,
-                    'width': int_or_none(image.get('width')),
-                    'height': int_or_none(image.get('height')),
-                    'filesize': int_or_none(image.get('size')),
-                }]
-
-        return {
-            'id': video_id,
-            'title': title,
-            'formats': formats,
-            'thumbnails': thumbnails,
-            'description': video.get('description'),
-            'timestamp': int_or_none(try_get(video, lambda x: x['dateCreated']['epoch'])),
-            'duration': int_or_none(metadata.get('duration')),
-        }
-
-
-class CBSSportsBaseIE(InfoExtractor):
-    def _real_extract(self, url):
-        display_id = self._match_id(url)
-        webpage = self._download_webpage(url, display_id)
-        iframe_url = self._search_regex(
-            r'<iframe[^>]+(?:data-)?src="(https?://[^/]+/player/embed[^"]+)"',
-            webpage, 'embed url')
-        return self.url_result(iframe_url, CBSSportsEmbedIE.ie_key())
-
-
-class CBSSportsIE(CBSSportsBaseIE):
-    IE_NAME = 'cbssports'
-    _VALID_URL = r'https?://(?:www\.)?cbssports\.com/[^/]+/video/(?P<id>[^/?#&]+)'
-    _TESTS = [{
-        'url': 'https://www.cbssports.com/college-football/video/cover-3-stanford-spring-gleaning/',
-        'info_dict': {
-            'id': 'b56c03a6-231a-4bbe-9c55-af3c8a8e9636',
-            'ext': 'mp4',
-            'title': 'Cover 3: Stanford Spring Gleaning',
-            'description': 'The Cover 3 crew break down everything you need to know about the Stanford Cardinal this spring.',
-            'timestamp': 1617218398,
-            'upload_date': '20210331',
-            'duration': 502,
-        },
-    }]
-
-
-class TwentyFourSevenSportsIE(CBSSportsBaseIE):
-    IE_NAME = '247sports'
-    _VALID_URL = r'https?://(?:www\.)?247sports\.com/Video/(?:[^/?#&]+-)?(?P<id>\d+)'
-    _TESTS = [{
-        'url': 'https://247sports.com/Video/2021-QB-Jake-Garcia-senior-highlights-through-five-games-10084854/',
-        'info_dict': {
-            'id': '4f1265cb-c3b5-44a8-bb1d-1914119a0ccc',
-            'ext': 'mp4',
-            'title': '2021 QB Jake Garcia senior highlights through five games',
-            'description': 'md5:8cb67ebed48e2e6adac1701e0ff6e45b',
-            'timestamp': 1607114223,
-            'upload_date': '20201204',
-            'duration': 208,
-        },
-    }]
+from __future__ import unicode_literals
+
+from .cbs import CBSBaseIE
+
+
+class CBSSportsIE(CBSBaseIE):
+    _VALID_URL = r'https?://(?:www\.)?cbssports\.com/[^/]+/(?:video|news)/(?P<id>[^/?#&]+)'
+
+    _TESTS = [{
+        'url': 'https://www.cbssports.com/nba/video/donovan-mitchell-flashes-star-potential-in-game-2-victory-over-thunder/',
+        'info_dict': {
+            'id': '1214315075735',
+            'ext': 'mp4',
+            'title': 'Donovan Mitchell flashes star potential in Game 2 victory over Thunder',
+            'description': 'md5:df6f48622612c2d6bd2e295ddef58def',
+            'timestamp': 1524111457,
+            'upload_date': '20180419',
+            'uploader': 'CBSI-NEW',
+        },
+        'params': {
+            # m3u8 download
+            'skip_download': True,
+        }
+    }, {
+        'url': 'https://www.cbssports.com/nba/news/nba-playoffs-2018-watch-76ers-vs-heat-game-3-series-schedule-tv-channel-online-stream/',
+        'only_matching': True,
+    }]
+
+    def _extract_video_info(self, filter_query, video_id):
+        return self._extract_feed_info('dJ5BDC', 'VxxJg8Ymh8sE', filter_query, video_id)
+
+    def _real_extract(self, url):
+        display_id = self._match_id(url)
+        webpage = self._download_webpage(url, display_id)
+        video_id = self._search_regex(
+            [r'(?:=|%26)pcid%3D(\d+)', r'embedVideo(?:Container)?_(\d+)'],
+            webpage, 'video id')
+        return self._extract_video_info('byId=%s' % video_id, video_id)
@ -1,69 +0,0 @@
# coding: utf-8
from __future__ import unicode_literals

from .common import InfoExtractor
from ..compat import compat_str
from ..utils import (
    ExtractorError,
    merge_dicts,
    T,
    traverse_obj,
    unified_timestamp,
    url_or_none,
)


class ClipchampIE(InfoExtractor):
    _VALID_URL = r'https?://(?:www\.)?clipchamp\.com/watch/(?P<id>[\w-]+)'
    _TESTS = [{
        'url': 'https://clipchamp.com/watch/gRXZ4ZhdDaU',
        'info_dict': {
            'id': 'gRXZ4ZhdDaU',
            'ext': 'mp4',
            'title': 'Untitled video',
            'uploader': 'Alexander Schwartz',
            'timestamp': 1680805580,
            'upload_date': '20230406',
            'thumbnail': r're:^https?://.+\.jpg',
        },
        'params': {
            'skip_download': 'm3u8',
            'format': 'bestvideo',
        },
    }]

    _STREAM_URL_TMPL = 'https://%s.cloudflarestream.com/%s/manifest/video.%s'
    _STREAM_URL_QUERY = {'parentOrigin': 'https://clipchamp.com'}

    def _real_extract(self, url):
        video_id = self._match_id(url)
        webpage = self._download_webpage(url, video_id)
        data = self._search_nextjs_data(webpage, video_id)['props']['pageProps']['video']

        storage_location = data.get('storage_location')
        if storage_location != 'cf_stream':
            raise ExtractorError('Unsupported clip storage location "%s"' % (storage_location,))

        path = data['download_url']
        iframe = self._download_webpage(
            'https://iframe.cloudflarestream.com/' + path, video_id, 'Downloading player iframe')
        subdomain = self._search_regex(
            r'''\bcustomer-domain-prefix\s*=\s*("|')(?P<sd>[\w-]+)\1''', iframe,
            'subdomain', group='sd', fatal=False) or 'customer-2ut9yn3y6fta1yxe'

        formats = self._extract_mpd_formats(
            self._STREAM_URL_TMPL % (subdomain, path, 'mpd'), video_id,
            query=self._STREAM_URL_QUERY, fatal=False, mpd_id='dash')
        formats.extend(self._extract_m3u8_formats(
            self._STREAM_URL_TMPL % (subdomain, path, 'm3u8'), video_id, 'mp4',
            query=self._STREAM_URL_QUERY, fatal=False, m3u8_id='hls'))

        return merge_dicts({
            'id': video_id,
            'formats': formats,
            'uploader': ' '.join(traverse_obj(data, ('creator', ('first_name', 'last_name'), T(compat_str)))) or None,
        }, traverse_obj(data, {
            'title': ('project', 'project_name', T(compat_str)),
            'timestamp': ('created_at', T(unified_timestamp)),
            'thumbnail': ('thumbnail_url', T(url_or_none)),
        }), rev=True)
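_STREAM_URL_TMPL above is filled with the Cloudflare Stream customer subdomain, the clip path and a manifest extension. A small sketch of the resulting URLs (illustrative; the path value is a placeholder, the subdomain is the fallback used in the code above):

STREAM_URL_TMPL = 'https://%s.cloudflarestream.com/%s/manifest/video.%s'
subdomain = 'customer-2ut9yn3y6fta1yxe'    # fallback from the extractor
path = 'example-download-url'              # placeholder for data['download_url']
dash_url = STREAM_URL_TMPL % (subdomain, path, 'mpd')
hls_url = STREAM_URL_TMPL % (subdomain, path, 'm3u8')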
File diff suppressed because it is too large
Some files were not shown because too many files have changed in this diff