mirror of https://github.com/yt-dlp/yt-dlp
Merge branch 'master' into fix/pp-embedthumbnail
commit 62d66dee80
@@ -0,0 +1,28 @@
self-hosted-runner:
  labels:
    # Workaround for the outdated runner list in actionlint v1.7.7
    # Ref: https://github.com/rhysd/actionlint/issues/533
    - windows-11-arm

config-variables:
  - KEEP_CACHE_WARM
  - PUSH_VERSION_COMMIT
  - UPDATE_TO_VERIFICATION
  - PYPI_PROJECT
  - PYPI_SUFFIX
  - NIGHTLY_PYPI_PROJECT
  - NIGHTLY_PYPI_SUFFIX
  - NIGHTLY_ARCHIVE_REPO
  - BUILD_NIGHTLY
  - MASTER_PYPI_PROJECT
  - MASTER_PYPI_SUFFIX
  - MASTER_ARCHIVE_REPO
  - BUILD_MASTER
  - ISSUE_LOCKDOWN
  - SANITIZE_COMMENT

paths:
  .github/workflows/build.yml:
    ignore:
      # SC1090 "Can't follow non-constant source": ignore when using `source` to activate venv
      - '.+SC1090.+'
@@ -0,0 +1,23 @@
name: Keep cache warm
on:
  workflow_dispatch:
  schedule:
    - cron: '0 22 1,6,11,16,21,27 * *'

jobs:
  build:
    if: |
      vars.KEEP_CACHE_WARM || github.event_name == 'workflow_dispatch'
    uses: ./.github/workflows/build.yml
    with:
      version: '999999'
      channel: stable
      origin: ${{ github.repository }}
      unix: false
      linux: false
      linux_armv7l: true
      musllinux: false
      macos: true
      windows: true
    permissions:
      contents: read
@@ -0,0 +1,41 @@
name: Signature Tests
on:
  push:
    paths:
      - .github/workflows/signature-tests.yml
      - test/test_youtube_signature.py
      - yt_dlp/jsinterp.py
  pull_request:
    paths:
      - .github/workflows/signature-tests.yml
      - test/test_youtube_signature.py
      - yt_dlp/jsinterp.py
permissions:
  contents: read

concurrency:
  group: signature-tests-${{ github.event.pull_request.number || github.ref }}
  cancel-in-progress: ${{ github.event_name == 'pull_request' }}

jobs:
  tests:
    name: Signature Tests
    runs-on: ${{ matrix.os }}
    strategy:
      fail-fast: false
      matrix:
        os: [ubuntu-latest, windows-latest]
        python-version: ['3.9', '3.10', '3.11', '3.12', '3.13', '3.14-dev', pypy-3.11]
    steps:
      - uses: actions/checkout@v4
      - name: Set up Python ${{ matrix.python-version }}
        uses: actions/setup-python@v6
        with:
          python-version: ${{ matrix.python-version }}
      - name: Install test requirements
        run: python3 ./devscripts/install_deps.py --only-optional --include test
      - name: Run tests
        timeout-minutes: 15
        run: |
          python3 -m yt_dlp -v || true  # Print debug head
          python3 ./devscripts/run_tests.py test/test_youtube_signature.py
@@ -0,0 +1,52 @@
name: Test and lint workflows
on:
  push:
    paths:
      - .github/workflows/*
      - bundle/docker/linux/*.sh
      - devscripts/setup_variables.py
      - devscripts/setup_variables_tests.py
      - devscripts/utils.py
  pull_request:
    paths:
      - .github/workflows/*
      - bundle/docker/linux/*.sh
      - devscripts/setup_variables.py
      - devscripts/setup_variables_tests.py
      - devscripts/utils.py
permissions:
  contents: read
env:
  ACTIONLINT_VERSION: "1.7.7"
  ACTIONLINT_SHA256SUM: 023070a287cd8cccd71515fedc843f1985bf96c436b7effaecce67290e7e0757
  ACTIONLINT_REPO: https://github.com/rhysd/actionlint

jobs:
  check:
    name: Check workflows
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v4
      - uses: actions/setup-python@v6
        with:
          python-version: "3.10"  # Keep this in sync with release.yml's prepare job
      - name: Install requirements
        env:
          ACTIONLINT_TARBALL: ${{ format('actionlint_{0}_linux_amd64.tar.gz', env.ACTIONLINT_VERSION) }}
        run: |
          python -m devscripts.install_deps -o --include test
          sudo apt -y install shellcheck
          python -m pip install -U pyflakes
          curl -LO "${ACTIONLINT_REPO}/releases/download/v${ACTIONLINT_VERSION}/${ACTIONLINT_TARBALL}"
          printf '%s  %s' "${ACTIONLINT_SHA256SUM}" "${ACTIONLINT_TARBALL}" | sha256sum -c -
          tar xvzf "${ACTIONLINT_TARBALL}" actionlint
          chmod +x actionlint
      - name: Run actionlint
        run: |
          ./actionlint -color
      - name: Check Docker shell scripts
        run: |
          shellcheck bundle/docker/linux/*.sh
      - name: Test GHA devscripts
        run: |
          pytest -Werror --tb=short devscripts/setup_variables_tests.py
File diff suppressed because it is too large
@@ -1,10 +1,178 @@
services:
  static:
    build: static

  linux_x86_64:
    build:
      context: linux
      target: build
      platforms:
        - "linux/amd64"
      args:
        BUILDIMAGE: ghcr.io/yt-dlp/manylinux2014_x86_64-shared:latest
    environment:
      channel: ${channel}
      origin: ${origin}
      version: ${version}
      EXE_NAME: ${EXE_NAME:?}
      CHANNEL: ${CHANNEL:?}
      ORIGIN: ${ORIGIN:?}
      VERSION:
      PYTHON_VERSION:
      SKIP_ONEDIR_BUILD:
      SKIP_ONEFILE_BUILD:
    volumes:
      - ~/build:/build
      - ../..:/yt-dlp

  linux_x86_64_verify:
    build:
      context: linux
      target: verify
      platforms:
        - "linux/amd64"
      args:
        VERIFYIMAGE: quay.io/pypa/manylinux2014_x86_64:latest
    environment:
      EXE_NAME: ${EXE_NAME:?}
      UPDATE_TO:
      SKIP_ONEDIR_BUILD:
      SKIP_ONEFILE_BUILD:
    volumes:
      - ../../dist:/build

  linux_aarch64:
    build:
      context: linux
      target: build
      platforms:
        - "linux/arm64"
      args:
        BUILDIMAGE: ghcr.io/yt-dlp/manylinux2014_aarch64-shared:latest
    environment:
      EXE_NAME: ${EXE_NAME:?}
      CHANNEL: ${CHANNEL:?}
      ORIGIN: ${ORIGIN:?}
      VERSION:
      PYTHON_VERSION:
      SKIP_ONEDIR_BUILD:
      SKIP_ONEFILE_BUILD:
    volumes:
      - ../..:/yt-dlp

  linux_aarch64_verify:
    build:
      context: linux
      target: verify
      platforms:
        - "linux/arm64"
      args:
        VERIFYIMAGE: quay.io/pypa/manylinux2014_aarch64:latest
    environment:
      EXE_NAME: ${EXE_NAME:?}
      UPDATE_TO:
      SKIP_ONEDIR_BUILD:
      SKIP_ONEFILE_BUILD:
    volumes:
      - ../../dist:/build

  linux_armv7l:
    build:
      context: linux
      target: build
      platforms:
        - "linux/arm/v7"
      args:
        BUILDIMAGE: ghcr.io/yt-dlp/manylinux_2_31_armv7l-shared:latest
    environment:
      EXE_NAME: ${EXE_NAME:?}
      CHANNEL: ${CHANNEL:?}
      ORIGIN: ${ORIGIN:?}
      VERSION:
      PYTHON_VERSION:
      SKIP_ONEDIR_BUILD:
      SKIP_ONEFILE_BUILD:
    volumes:
      - ../..:/yt-dlp
      - ../../venv:/yt-dlp-build-venv

  linux_armv7l_verify:
    build:
      context: linux
      target: verify
      platforms:
        - "linux/arm/v7"
      args:
        VERIFYIMAGE: arm32v7/debian:bullseye
    environment:
      EXE_NAME: ${EXE_NAME:?}
      UPDATE_TO:
      SKIP_ONEDIR_BUILD:
      SKIP_ONEFILE_BUILD:
    volumes:
      - ../../dist:/build

  musllinux_x86_64:
    build:
      context: linux
      target: build
      platforms:
        - "linux/amd64"
      args:
        BUILDIMAGE: ghcr.io/yt-dlp/musllinux_1_2_x86_64-shared:latest
    environment:
      EXE_NAME: ${EXE_NAME:?}
      CHANNEL: ${CHANNEL:?}
      ORIGIN: ${ORIGIN:?}
      VERSION:
      PYTHON_VERSION:
      SKIP_ONEDIR_BUILD:
      SKIP_ONEFILE_BUILD:
    volumes:
      - ../..:/yt-dlp

  musllinux_x86_64_verify:
    build:
      context: linux
      target: verify
      platforms:
        - "linux/amd64"
      args:
        VERIFYIMAGE: alpine:3.22
    environment:
      EXE_NAME: ${EXE_NAME:?}
      UPDATE_TO:
      SKIP_ONEDIR_BUILD:
      SKIP_ONEFILE_BUILD:
    volumes:
      - ../../dist:/build

  musllinux_aarch64:
    build:
      context: linux
      target: build
      platforms:
        - "linux/arm64"
      args:
        BUILDIMAGE: ghcr.io/yt-dlp/musllinux_1_2_aarch64-shared:latest
    environment:
      EXE_NAME: ${EXE_NAME:?}
      CHANNEL: ${CHANNEL:?}
      ORIGIN: ${ORIGIN:?}
      VERSION:
      PYTHON_VERSION:
      SKIP_ONEDIR_BUILD:
      SKIP_ONEFILE_BUILD:
      EXCLUDE_CURL_CFFI: "1"
    volumes:
      - ../..:/yt-dlp

  musllinux_aarch64_verify:
    build:
      context: linux
      target: verify
      platforms:
        - "linux/arm64"
      args:
        VERIFYIMAGE: alpine:3.22
    environment:
      EXE_NAME: ${EXE_NAME:?}
      UPDATE_TO:
      SKIP_ONEDIR_BUILD:
      SKIP_ONEFILE_BUILD:
    volumes:
      - ../../dist:/build
@@ -0,0 +1,16 @@
ARG BUILDIMAGE=ghcr.io/yt-dlp/manylinux2014_x86_64-shared:latest
ARG VERIFYIMAGE=alpine:3.22


FROM $BUILDIMAGE AS build

WORKDIR /yt-dlp
COPY build.sh /build.sh
ENTRYPOINT ["/build.sh"]


FROM $VERIFYIMAGE AS verify

WORKDIR /testing
COPY verify.sh /verify.sh
ENTRYPOINT ["/verify.sh"]
@@ -0,0 +1,48 @@
#!/bin/bash
set -exuo pipefail

if [[ -z "${PYTHON_VERSION:-}" ]]; then
    PYTHON_VERSION="3.13"
    echo "Defaulting to using Python ${PYTHON_VERSION}"
fi

function runpy {
    "/opt/shared-cpython-${PYTHON_VERSION}/bin/python${PYTHON_VERSION}" "$@"
}

function venvpy {
    "python${PYTHON_VERSION}" "$@"
}

INCLUDES=(
    --include pyinstaller
    --include secretstorage
)

if [[ -z "${EXCLUDE_CURL_CFFI:-}" ]]; then
    INCLUDES+=(--include curl-cffi)
fi

runpy -m venv /yt-dlp-build-venv
# shellcheck disable=SC1091
source /yt-dlp-build-venv/bin/activate
# Inside the venv we use venvpy instead of runpy
venvpy -m ensurepip --upgrade --default-pip
venvpy -m devscripts.install_deps -o --include build
venvpy -m devscripts.install_deps "${INCLUDES[@]}"
venvpy -m devscripts.make_lazy_extractors
venvpy devscripts/update-version.py -c "${CHANNEL}" -r "${ORIGIN}" "${VERSION}"

if [[ -z "${SKIP_ONEDIR_BUILD:-}" ]]; then
    mkdir -p /build
    venvpy -m bundle.pyinstaller --onedir --distpath=/build
    pushd "/build/${EXE_NAME}"
    chmod +x "${EXE_NAME}"
    venvpy -m zipfile -c "/yt-dlp/dist/${EXE_NAME}.zip" ./
    popd
fi

if [[ -z "${SKIP_ONEFILE_BUILD:-}" ]]; then
    venvpy -m bundle.pyinstaller
    chmod +x "./dist/${EXE_NAME}"
fi
@@ -0,0 +1,51 @@
#!/bin/sh
set -eu

if [ -n "${SKIP_ONEFILE_BUILD:-}" ]; then
    if [ -n "${SKIP_ONEDIR_BUILD:-}" ]; then
        echo "All executable builds were skipped"
        exit 1
    fi
    echo "Extracting zip to verify onedir build"
    if command -v python3 >/dev/null 2>&1; then
        python3 -m zipfile -e "/build/${EXE_NAME}.zip" ./
    else
        echo "Attempting to install unzip"
        if command -v dnf >/dev/null 2>&1; then
            dnf -y install --allowerasing unzip
        elif command -v yum >/dev/null 2>&1; then
            yum -y install unzip
        elif command -v apt-get >/dev/null 2>&1; then
            DEBIAN_FRONTEND=noninteractive apt-get update -qq
            DEBIAN_FRONTEND=noninteractive apt-get install -qq -y --no-install-recommends unzip
        elif command -v apk >/dev/null 2>&1; then
            apk add --no-cache unzip
        else
            echo "Unsupported image"
            exit 1
        fi
        unzip "/build/${EXE_NAME}.zip" -d ./
    fi
    chmod +x "./${EXE_NAME}"
    "./${EXE_NAME}" -v || true
    "./${EXE_NAME}" --version
    exit 0
fi

echo "Verifying onefile build"
cp "/build/${EXE_NAME}" ./
chmod +x "./${EXE_NAME}"

if [ -z "${UPDATE_TO:-}" ]; then
    "./${EXE_NAME}" -v || true
    "./${EXE_NAME}" --version
    exit 0
fi

cp "./${EXE_NAME}" "./${EXE_NAME}_downgraded"
version="$("./${EXE_NAME}" --version)"
"./${EXE_NAME}_downgraded" -v --update-to "${UPDATE_TO}"
downgraded_version="$("./${EXE_NAME}_downgraded" --version)"
if [ "${version}" = "${downgraded_version}" ]; then
    exit 1
fi
@@ -1,21 +0,0 @@
FROM alpine:3.19 as base

RUN apk --update add --no-cache \
    build-base \
    python3 \
    pipx \
    ;

RUN pipx install pyinstaller
# Requires above step to prepare the shared venv
RUN ~/.local/share/pipx/shared/bin/python -m pip install -U wheel
RUN apk --update add --no-cache \
    scons \
    patchelf \
    binutils \
    ;
RUN pipx install staticx

WORKDIR /yt-dlp
COPY entrypoint.sh /entrypoint.sh
ENTRYPOINT /entrypoint.sh
@@ -1,13 +0,0 @@
#!/bin/ash
set -e

source ~/.local/share/pipx/venvs/pyinstaller/bin/activate
python -m devscripts.install_deps --include secretstorage --include curl-cffi
python -m devscripts.make_lazy_extractors
python devscripts/update-version.py -c "${channel}" -r "${origin}" "${version}"
python -m bundle.pyinstaller
deactivate

source ~/.local/share/pipx/venvs/staticx/bin/activate
staticx /yt-dlp/dist/yt-dlp_linux /build/yt-dlp_linux
deactivate
@@ -0,0 +1,316 @@
import requests
from dataclasses import dataclass
from pathlib import Path
import hashlib

DEFAULT_OUTPUT = 'THIRD_PARTY_LICENSES.txt'
CACHE_LOCATION = '.license_cache'
HEADER = '''THIRD-PARTY LICENSES

This file aggregates license texts of third-party components included with the yt-dlp PyInstaller-bundled executables.
yt-dlp itself is licensed under the Unlicense (see LICENSE file).
Source code for bundled third-party components is available from the original projects.
If you cannot obtain it, the maintainers will provide it as per license obligation; maintainer emails are listed in pyproject.toml.'''


@dataclass(frozen=True)
class Dependency:
    name: str
    license_url: str
    project_url: str = ''
    license: str = ''
    comment: str = ''


DEPENDENCIES: list[Dependency] = [
    # Core runtime environment components
    Dependency(
        name='Python',
        license='PSF-2.0',
        license_url='https://raw.githubusercontent.com/python/cpython/refs/heads/main/LICENSE',
        project_url='https://www.python.org/',
    ),
    Dependency(
        name='Microsoft Distributable Code',
        license_url='https://raw.githubusercontent.com/python/cpython/refs/heads/main/PC/crtlicense.txt',
        comment='Only included in Windows builds',
    ),
    Dependency(
        name='bzip2',
        license='bzip2-1.0.6',
        license_url='https://gitlab.com/federicomenaquintero/bzip2/-/raw/master/COPYING',
        project_url='https://sourceware.org/bzip2/',
    ),
    Dependency(
        name='libffi',
        license='MIT',
        license_url='https://raw.githubusercontent.com/libffi/libffi/refs/heads/master/LICENSE',
        project_url='https://sourceware.org/libffi/',
    ),
    Dependency(
        name='OpenSSL 3.0+',
        license='Apache-2.0',
        license_url='https://raw.githubusercontent.com/openssl/openssl/refs/heads/master/LICENSE.txt',
        project_url='https://www.openssl.org/',
    ),
    Dependency(
        name='SQLite',
        license='Public Domain',  # Technically does not need to be included
        license_url='https://sqlite.org/src/raw/e108e1e69ae8e8a59e93c455654b8ac9356a11720d3345df2a4743e9590fb20d?at=LICENSE.md',
        project_url='https://www.sqlite.org/',
    ),
    Dependency(
        name='liblzma',
        license='0BSD',  # Technically does not need to be included
        license_url='https://raw.githubusercontent.com/tukaani-project/xz/refs/heads/master/COPYING',
        project_url='https://tukaani.org/xz/',
    ),
    Dependency(
        name='mpdecimal',
        license='BSD-2-Clause',
        # No official repo URL
        license_url='https://gist.githubusercontent.com/seproDev/9e5dbfc08af35c3f2463e64eb9b27161/raw/61f5a98bc1a4ad7d48b1c793fc3314d4d43c2ab1/mpdecimal_COPYRIGHT.txt',
        project_url='https://www.bytereef.org/mpdecimal/',
    ),
    Dependency(
        name='zlib',
        license='zlib',
        license_url='https://raw.githubusercontent.com/madler/zlib/refs/heads/develop/LICENSE',
        project_url='https://zlib.net/',
    ),
    Dependency(
        name='Expat',
        license='MIT',
        license_url='https://raw.githubusercontent.com/libexpat/libexpat/refs/heads/master/COPYING',
        project_url='https://libexpat.github.io/',
    ),
    Dependency(
        name='ncurses',
        license='X11-distribute-modifications-variant',
        license_url='https://raw.githubusercontent.com/mirror/ncurses/refs/heads/master/COPYING',
        comment='Only included in Linux/macOS builds',
        project_url='https://invisible-island.net/ncurses/',
    ),
    Dependency(
        name='GNU Readline',
        license='GPL-3.0-or-later',
        license_url='https://tiswww.case.edu/php/chet/readline/COPYING',
        comment='Only included in Linux builds',
        project_url='https://www.gnu.org/software/readline/',
    ),
    Dependency(
        name='libstdc++',
        license='GPL-3.0-with-GCC-exception',
        license_url='https://raw.githubusercontent.com/gcc-mirror/gcc/refs/heads/master/COPYING.RUNTIME',
        comment='Only included in Linux builds',
        project_url='https://gcc.gnu.org/onlinedocs/libstdc++/',
    ),
    Dependency(
        name='libgcc',
        license='GPL-3.0-with-GCC-exception',
        license_url='https://raw.githubusercontent.com/gcc-mirror/gcc/refs/heads/master/COPYING.RUNTIME',
        comment='Only included in Linux builds',
        project_url='https://gcc.gnu.org/',
    ),
    Dependency(
        name='libuuid',
        license='BSD-3-Clause',
        license_url='https://git.kernel.org/pub/scm/fs/ext2/e2fsprogs.git/plain/lib/uuid/COPYING',
        comment='Only included in Linux builds',
        project_url='https://git.kernel.org/pub/scm/fs/ext2/e2fsprogs.git/tree/lib/uuid',
    ),
    Dependency(
        name='libintl',
        license='LGPL-2.1-or-later',
        license_url='https://raw.githubusercontent.com/autotools-mirror/gettext/refs/heads/master/gettext-runtime/intl/COPYING.LIB',
        comment='Only included in macOS builds',
        project_url='https://www.gnu.org/software/gettext/',
    ),
    Dependency(
        name='libidn2',
        license='LGPL-3.0-or-later',
        license_url='https://gitlab.com/libidn/libidn2/-/raw/master/COPYING.LESSERv3',
        comment='Only included in macOS builds',
        project_url='https://www.gnu.org/software/libidn/',
    ),
    Dependency(
        name='libidn2 (Unicode character data files)',
        license='Unicode-TOU AND Unicode-DFS-2016',
        license_url='https://gitlab.com/libidn/libidn2/-/raw/master/COPYING.unicode',
        comment='Only included in macOS builds',
        project_url='https://www.gnu.org/software/libidn/',
    ),
    Dependency(
        name='libunistring',
        license='LGPL-3.0-or-later',
        license_url='https://gitweb.git.savannah.gnu.org/gitweb/?p=libunistring.git;a=blob_plain;f=COPYING.LIB;hb=HEAD',
        comment='Only included in macOS builds',
        project_url='https://www.gnu.org/software/libunistring/',
    ),
    Dependency(
        name='librtmp',
        license='LGPL-2.1-or-later',
        # No official repo URL
        license_url='https://gist.githubusercontent.com/seproDev/31d8c691ccddebe37b8b379307cb232d/raw/053408e98547ea8c7d9ba3a80c965f33e163b881/librtmp_COPYING.txt',
        comment='Only included in macOS builds',
        project_url='https://rtmpdump.mplayerhq.hu/',
    ),
    Dependency(
        name='zstd',
        license='BSD-3-Clause',
        license_url='https://raw.githubusercontent.com/facebook/zstd/refs/heads/dev/LICENSE',
        comment='Only included in macOS builds',
        project_url='https://facebook.github.io/zstd/',
    ),

    # Python packages
    Dependency(
        name='brotli',
        license='MIT',
        license_url='https://raw.githubusercontent.com/google/brotli/refs/heads/master/LICENSE',
        project_url='https://brotli.org/',
    ),
    Dependency(
        name='curl_cffi',
        license='MIT',
        license_url='https://raw.githubusercontent.com/lexiforest/curl_cffi/refs/heads/main/LICENSE',
        comment='Not included in `yt-dlp_x86` and `yt-dlp_musllinux_aarch64` builds',
        project_url='https://curl-cffi.readthedocs.io/',
    ),
    # Dependency of curl_cffi
    Dependency(
        name='curl-impersonate',
        license='MIT',
        license_url='https://raw.githubusercontent.com/lexiforest/curl-impersonate/refs/heads/main/LICENSE',
        comment='Not included in `yt-dlp_x86` and `yt-dlp_musllinux_aarch64` builds',
        project_url='https://github.com/lexiforest/curl-impersonate',
    ),
    Dependency(
        name='cffi',
        license='MIT-0',  # Technically does not need to be included
        license_url='https://raw.githubusercontent.com/python-cffi/cffi/refs/heads/main/LICENSE',
        project_url='https://cffi.readthedocs.io/',
    ),
    # Dependency of cffi
    Dependency(
        name='pycparser',
        license='BSD-3-Clause',
        license_url='https://raw.githubusercontent.com/eliben/pycparser/refs/heads/main/LICENSE',
        project_url='https://github.com/eliben/pycparser',
    ),
    Dependency(
        name='mutagen',
        license='GPL-2.0-or-later',
        license_url='https://raw.githubusercontent.com/quodlibet/mutagen/refs/heads/main/COPYING',
        project_url='https://mutagen.readthedocs.io/',
    ),
    Dependency(
        name='PyCryptodome',
        license='Public Domain and BSD-2-Clause',
        license_url='https://raw.githubusercontent.com/Legrandin/pycryptodome/refs/heads/master/LICENSE.rst',
        project_url='https://www.pycryptodome.org/',
    ),
    Dependency(
        name='certifi',
        license='MPL-2.0',
        license_url='https://raw.githubusercontent.com/certifi/python-certifi/refs/heads/master/LICENSE',
        project_url='https://github.com/certifi/python-certifi',
    ),
    Dependency(
        name='requests',
        license='Apache-2.0',
        license_url='https://raw.githubusercontent.com/psf/requests/refs/heads/main/LICENSE',
        project_url='https://requests.readthedocs.io/',
    ),
    # Dependency of requests
    Dependency(
        name='charset-normalizer',
        license='MIT',
        license_url='https://raw.githubusercontent.com/jawah/charset_normalizer/refs/heads/master/LICENSE',
        project_url='https://charset-normalizer.readthedocs.io/',
    ),
    # Dependency of requests
    Dependency(
        name='idna',
        license='BSD-3-Clause',
        license_url='https://raw.githubusercontent.com/kjd/idna/refs/heads/master/LICENSE.md',
        project_url='https://github.com/kjd/idna',
    ),
    Dependency(
        name='urllib3',
        license='MIT',
        license_url='https://raw.githubusercontent.com/urllib3/urllib3/refs/heads/main/LICENSE.txt',
        project_url='https://urllib3.readthedocs.io/',
    ),
    Dependency(
        name='SecretStorage',
        license='BSD-3-Clause',
        license_url='https://raw.githubusercontent.com/mitya57/secretstorage/refs/heads/master/LICENSE',
        comment='Only included in Linux builds',
        project_url='https://secretstorage.readthedocs.io/',
    ),
    # Dependency of SecretStorage
    Dependency(
        name='cryptography',
        license='Apache-2.0',  # Also available as BSD-3-Clause
        license_url='https://raw.githubusercontent.com/pyca/cryptography/refs/heads/main/LICENSE.APACHE',
        comment='Only included in Linux builds',
        project_url='https://cryptography.io/',
    ),
    # Dependency of SecretStorage
    Dependency(
        name='Jeepney',
        license='MIT',
        license_url='https://gitlab.com/takluyver/jeepney/-/raw/master/LICENSE',
        comment='Only included in Linux builds',
        project_url='https://jeepney.readthedocs.io/',
    ),
    Dependency(
        name='websockets',
        license='BSD-3-Clause',
        license_url='https://raw.githubusercontent.com/python-websockets/websockets/refs/heads/main/LICENSE',
        project_url='https://websockets.readthedocs.io/',
    ),
]


def fetch_text(dep: Dependency) -> str:
    cache_dir = Path(CACHE_LOCATION)
    cache_dir.mkdir(exist_ok=True)
    url_hash = hashlib.sha256(dep.license_url.encode('utf-8')).hexdigest()
    cache_file = cache_dir / f'{url_hash}.txt'

    if cache_file.exists():
        return cache_file.read_text()

    # UA needed since some domains block requests default UA
    req = requests.get(dep.license_url, headers={'User-Agent': 'yt-dlp license fetcher'})
    req.raise_for_status()
    text = req.text
    cache_file.write_text(text)
    return text


def build_output() -> str:
    lines = [HEADER]
    for d in DEPENDENCIES:
        lines.append('\n')
        lines.append('-' * 80)
        header = f'{d.name}'
        if d.license:
            header += f' | {d.license}'
        if d.comment:
            header += f'\nNote: {d.comment}'
        if d.project_url:
            header += f'\nURL: {d.project_url}'
        lines.append(header)
        lines.append('-' * 80)

        text = fetch_text(d)
        lines.append(text.strip('\n') + '\n')
    return '\n'.join(lines)


if __name__ == '__main__':
    content = build_output()
    Path(DEFAULT_OUTPUT).write_text(content)
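
For illustration, a hypothetical new entry would follow the same shape as the ones above; the package name and URLs below are placeholders, not a real yt-dlp dependency:

    # Hypothetical example entry; 'example-pkg' and its URLs are placeholders
    Dependency(
        name='example-pkg',
        license='MIT',
        license_url='https://example.com/example-pkg/LICENSE',
        project_url='https://example.com/example-pkg/',
        comment='Only included in example builds',
    ),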
@@ -0,0 +1,157 @@
# Allow direct execution
import os
import sys

sys.path.insert(0, os.path.dirname(os.path.dirname(os.path.abspath(__file__))))

import datetime as dt
import json

from devscripts.utils import calculate_version


STABLE_REPOSITORY = 'yt-dlp/yt-dlp'


def setup_variables(environment):
    """
    `environment` must contain these keys:
        REPOSITORY, INPUTS, PROCESSED,
        PUSH_VERSION_COMMIT, PYPI_PROJECT,
        SOURCE_PYPI_PROJECT, SOURCE_PYPI_SUFFIX,
        TARGET_PYPI_PROJECT, TARGET_PYPI_SUFFIX,
        SOURCE_ARCHIVE_REPO, TARGET_ARCHIVE_REPO,
        HAS_SOURCE_ARCHIVE_REPO_TOKEN,
        HAS_TARGET_ARCHIVE_REPO_TOKEN,
        HAS_ARCHIVE_REPO_TOKEN

    `INPUTS` must contain these keys:
        prerelease

    `PROCESSED` must contain these keys:
        source_repo, source_tag,
        target_repo, target_tag
    """
    REPOSITORY = environment['REPOSITORY']
    INPUTS = json.loads(environment['INPUTS'])
    PROCESSED = json.loads(environment['PROCESSED'])

    source_channel = None
    does_not_have_needed_token = False
    target_repo_token = None
    pypi_project = None
    pypi_suffix = None

    source_repo = PROCESSED['source_repo']
    source_tag = PROCESSED['source_tag']
    if source_repo == 'stable':
        source_repo = STABLE_REPOSITORY
    if not source_repo:
        source_repo = REPOSITORY
    elif environment['SOURCE_ARCHIVE_REPO']:
        source_channel = environment['SOURCE_ARCHIVE_REPO']
    elif not source_tag and '/' not in source_repo:
        source_tag = source_repo
        source_repo = REPOSITORY

    resolved_source = source_repo
    if source_tag:
        resolved_source = f'{resolved_source}@{source_tag}'
    elif source_repo == STABLE_REPOSITORY:
        resolved_source = 'stable'

    revision = None
    if INPUTS['prerelease'] or not environment['PUSH_VERSION_COMMIT']:
        revision = dt.datetime.now(tz=dt.timezone.utc).strftime('%H%M%S')

    version = calculate_version(INPUTS.get('version') or revision)

    target_repo = PROCESSED['target_repo']
    target_tag = PROCESSED['target_tag']
    if target_repo:
        if target_repo == 'stable':
            target_repo = STABLE_REPOSITORY
        if not target_tag:
            if target_repo == STABLE_REPOSITORY:
                target_tag = version
            elif environment['TARGET_ARCHIVE_REPO']:
                target_tag = source_tag or version
            else:
                target_tag = target_repo
                target_repo = REPOSITORY
        if target_repo != REPOSITORY:
            target_repo = environment['TARGET_ARCHIVE_REPO']
            target_repo_token = f'{PROCESSED["target_repo"].upper()}_ARCHIVE_REPO_TOKEN'
            if not json.loads(environment['HAS_TARGET_ARCHIVE_REPO_TOKEN']):
                does_not_have_needed_token = True
            pypi_project = environment['TARGET_PYPI_PROJECT'] or None
            pypi_suffix = environment['TARGET_PYPI_SUFFIX'] or None
    else:
        target_tag = source_tag or version
        if source_channel:
            target_repo = source_channel
            target_repo_token = f'{PROCESSED["source_repo"].upper()}_ARCHIVE_REPO_TOKEN'
            if not json.loads(environment['HAS_SOURCE_ARCHIVE_REPO_TOKEN']):
                does_not_have_needed_token = True
            pypi_project = environment['SOURCE_PYPI_PROJECT'] or None
            pypi_suffix = environment['SOURCE_PYPI_SUFFIX'] or None
        else:
            target_repo = REPOSITORY

    if does_not_have_needed_token:
        if not json.loads(environment['HAS_ARCHIVE_REPO_TOKEN']):
            print(f'::error::Repository access secret {target_repo_token} not found')
            return None
        target_repo_token = 'ARCHIVE_REPO_TOKEN'

    if target_repo == REPOSITORY and not INPUTS['prerelease']:
        pypi_project = environment['PYPI_PROJECT'] or None

    return {
        'channel': resolved_source,
        'version': version,
        'target_repo': target_repo,
        'target_repo_token': target_repo_token,
        'target_tag': target_tag,
        'pypi_project': pypi_project,
        'pypi_suffix': pypi_suffix,
    }


def process_inputs(inputs):
    outputs = {}
    for key in ('source', 'target'):
        repo, _, tag = inputs.get(key, '').partition('@')
        outputs[f'{key}_repo'] = repo
        outputs[f'{key}_tag'] = tag
    return outputs


if __name__ == '__main__':
    if not os.getenv('GITHUB_OUTPUT'):
        print('This script is only intended for use with GitHub Actions', file=sys.stderr)
        sys.exit(1)

    if 'process_inputs' in sys.argv:
        inputs = json.loads(os.environ['INPUTS'])
        print('::group::Inputs')
        print(json.dumps(inputs, indent=2))
        print('::endgroup::')
        outputs = process_inputs(inputs)
        print('::group::Processed')
        print(json.dumps(outputs, indent=2))
        print('::endgroup::')
        with open(os.environ['GITHUB_OUTPUT'], 'a') as f:
            f.write('\n'.join(f'{key}={value}' for key, value in outputs.items()))
        sys.exit(0)

    outputs = setup_variables(dict(os.environ))
    if not outputs:
        sys.exit(1)

    print('::group::Output variables')
    print(json.dumps(outputs, indent=2))
    print('::endgroup::')

    with open(os.environ['GITHUB_OUTPUT'], 'a') as f:
        f.write('\n'.join(f'{key}={value or ""}' for key, value in outputs.items()))
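
A minimal sketch of driving setup_variables() by hand, outside of GitHub Actions; all values below are illustrative placeholders (in CI they come from the workflow's vars/secrets wiring):

    # Illustrative invocation only; run from the repo root
    import json
    from devscripts.setup_variables import process_inputs, setup_variables

    inputs = {'prerelease': True, 'source': 'nightly', 'target': 'nightly'}
    env = {
        'REPOSITORY': 'fork/yt-dlp',  # placeholder repository
        'INPUTS': json.dumps(inputs),
        'PROCESSED': json.dumps(process_inputs(inputs)),
        'PUSH_VERSION_COMMIT': '', 'PYPI_PROJECT': '',
        'SOURCE_PYPI_PROJECT': '', 'SOURCE_PYPI_SUFFIX': '',
        'TARGET_PYPI_PROJECT': '', 'TARGET_PYPI_SUFFIX': '',
        'SOURCE_ARCHIVE_REPO': '', 'TARGET_ARCHIVE_REPO': '',
        'HAS_SOURCE_ARCHIVE_REPO_TOKEN': 'false',
        'HAS_TARGET_ARCHIVE_REPO_TOKEN': 'false',
        'HAS_ARCHIVE_REPO_TOKEN': 'false',
    }
    # Yields a dict with channel/version/target_repo/target_tag/pypi_* keys
    print(setup_variables(env))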
@@ -0,0 +1,324 @@
import os
import sys

sys.path.insert(0, os.path.dirname(os.path.dirname(os.path.abspath(__file__))))

import datetime as dt
import json

from devscripts.setup_variables import STABLE_REPOSITORY, process_inputs, setup_variables
from devscripts.utils import calculate_version


def _test(github_repository, note, repo_vars, repo_secrets, inputs, expected=None, ignore_revision=False):
    inp = inputs.copy()
    inp.setdefault('linux_armv7l', True)
    inp.setdefault('prerelease', False)
    processed = process_inputs(inp)
    source_repo = processed['source_repo'].upper()
    target_repo = processed['target_repo'].upper()
    variables = {k.upper(): v for k, v in repo_vars.items()}
    secrets = {k.upper(): v for k, v in repo_secrets.items()}

    env = {
        # Keep this in sync with prepare.setup_variables in release.yml
        'INPUTS': json.dumps(inp),
        'PROCESSED': json.dumps(processed),
        'REPOSITORY': github_repository,
        'PUSH_VERSION_COMMIT': variables.get('PUSH_VERSION_COMMIT') or '',
        'PYPI_PROJECT': variables.get('PYPI_PROJECT') or '',
        'SOURCE_PYPI_PROJECT': variables.get(f'{source_repo}_PYPI_PROJECT') or '',
        'SOURCE_PYPI_SUFFIX': variables.get(f'{source_repo}_PYPI_SUFFIX') or '',
        'TARGET_PYPI_PROJECT': variables.get(f'{target_repo}_PYPI_PROJECT') or '',
        'TARGET_PYPI_SUFFIX': variables.get(f'{target_repo}_PYPI_SUFFIX') or '',
        'SOURCE_ARCHIVE_REPO': variables.get(f'{source_repo}_ARCHIVE_REPO') or '',
        'TARGET_ARCHIVE_REPO': variables.get(f'{target_repo}_ARCHIVE_REPO') or '',
        'HAS_SOURCE_ARCHIVE_REPO_TOKEN': json.dumps(bool(secrets.get(f'{source_repo}_ARCHIVE_REPO_TOKEN'))),
        'HAS_TARGET_ARCHIVE_REPO_TOKEN': json.dumps(bool(secrets.get(f'{target_repo}_ARCHIVE_REPO_TOKEN'))),
        'HAS_ARCHIVE_REPO_TOKEN': json.dumps(bool(secrets.get('ARCHIVE_REPO_TOKEN'))),
    }

    result = setup_variables(env)
    if not expected:
        print('    {\n' + '\n'.join(f'        {k!r}: {v!r},' for k, v in result.items()) + '\n    }')
        return

    exp = expected.copy()
    if ignore_revision:
        assert len(result['version']) == len(exp['version']), f'revision missing: {github_repository} {note}'
        version_is_tag = result['version'] == result['target_tag']
        for dct in (result, exp):
            dct['version'] = '.'.join(dct['version'].split('.')[:3])
            if version_is_tag:
                dct['target_tag'] = dct['version']
    assert result == exp, f'unexpected result: {github_repository} {note}'


def test_setup_variables():
    DEFAULT_VERSION_WITH_REVISION = dt.datetime.now(tz=dt.timezone.utc).strftime('%Y.%m.%d.%H%M%S')
    DEFAULT_VERSION = calculate_version()
    BASE_REPO_VARS = {
        'MASTER_ARCHIVE_REPO': 'yt-dlp/yt-dlp-master-builds',
        'NIGHTLY_ARCHIVE_REPO': 'yt-dlp/yt-dlp-nightly-builds',
        'NIGHTLY_PYPI_PROJECT': 'yt-dlp',
        'NIGHTLY_PYPI_SUFFIX': 'dev',
        'PUSH_VERSION_COMMIT': '1',
        'PYPI_PROJECT': 'yt-dlp',
    }
    BASE_REPO_SECRETS = {
        'ARCHIVE_REPO_TOKEN': '1',
    }
    FORK_REPOSITORY = 'fork/yt-dlp'
    FORK_ORG = FORK_REPOSITORY.partition('/')[0]

    _test(
        STABLE_REPOSITORY, 'official vars/secrets, stable',
        BASE_REPO_VARS, BASE_REPO_SECRETS, {}, {
            'channel': 'stable',
            'version': DEFAULT_VERSION,
            'target_repo': STABLE_REPOSITORY,
            'target_repo_token': None,
            'target_tag': DEFAULT_VERSION,
            'pypi_project': 'yt-dlp',
            'pypi_suffix': None,
        })
    _test(
        STABLE_REPOSITORY, 'official vars/secrets, nightly (w/o target)',
        BASE_REPO_VARS, BASE_REPO_SECRETS, {
            'source': 'nightly',
            'prerelease': True,
        }, {
            'channel': 'nightly',
            'version': DEFAULT_VERSION_WITH_REVISION,
            'target_repo': 'yt-dlp/yt-dlp-nightly-builds',
            'target_repo_token': 'ARCHIVE_REPO_TOKEN',
            'target_tag': DEFAULT_VERSION_WITH_REVISION,
            'pypi_project': 'yt-dlp',
            'pypi_suffix': 'dev',
        }, ignore_revision=True)
    _test(
        STABLE_REPOSITORY, 'official vars/secrets, nightly',
        BASE_REPO_VARS, BASE_REPO_SECRETS, {
            'source': 'nightly',
            'target': 'nightly',
            'prerelease': True,
        }, {
            'channel': 'nightly',
            'version': DEFAULT_VERSION_WITH_REVISION,
            'target_repo': 'yt-dlp/yt-dlp-nightly-builds',
            'target_repo_token': 'ARCHIVE_REPO_TOKEN',
            'target_tag': DEFAULT_VERSION_WITH_REVISION,
            'pypi_project': 'yt-dlp',
            'pypi_suffix': 'dev',
        }, ignore_revision=True)
    _test(
        STABLE_REPOSITORY, 'official vars/secrets, master (w/o target)',
        BASE_REPO_VARS, BASE_REPO_SECRETS, {
            'source': 'master',
            'prerelease': True,
        }, {
            'channel': 'master',
            'version': DEFAULT_VERSION_WITH_REVISION,
            'target_repo': 'yt-dlp/yt-dlp-master-builds',
            'target_repo_token': 'ARCHIVE_REPO_TOKEN',
            'target_tag': DEFAULT_VERSION_WITH_REVISION,
            'pypi_project': None,
            'pypi_suffix': None,
        }, ignore_revision=True)
    _test(
        STABLE_REPOSITORY, 'official vars/secrets, master',
        BASE_REPO_VARS, BASE_REPO_SECRETS, {
            'source': 'master',
            'target': 'master',
            'prerelease': True,
        }, {
            'channel': 'master',
            'version': DEFAULT_VERSION_WITH_REVISION,
            'target_repo': 'yt-dlp/yt-dlp-master-builds',
            'target_repo_token': 'ARCHIVE_REPO_TOKEN',
            'target_tag': DEFAULT_VERSION_WITH_REVISION,
            'pypi_project': None,
            'pypi_suffix': None,
        }, ignore_revision=True)
    _test(
        STABLE_REPOSITORY, 'official vars/secrets, special tag, updates to stable',
        BASE_REPO_VARS, BASE_REPO_SECRETS, {
            'target': f'{STABLE_REPOSITORY}@experimental',
            'prerelease': True,
        }, {
            'channel': 'stable',
            'version': DEFAULT_VERSION_WITH_REVISION,
            'target_repo': STABLE_REPOSITORY,
            'target_repo_token': None,
            'target_tag': 'experimental',
            'pypi_project': None,
            'pypi_suffix': None,
        }, ignore_revision=True)
    _test(
        STABLE_REPOSITORY, 'official vars/secrets, special tag, "stable" as target repo',
        BASE_REPO_VARS, BASE_REPO_SECRETS, {
            'target': 'stable@experimental',
            'prerelease': True,
        }, {
            'channel': 'stable',
            'version': DEFAULT_VERSION_WITH_REVISION,
            'target_repo': STABLE_REPOSITORY,
            'target_repo_token': None,
            'target_tag': 'experimental',
            'pypi_project': None,
            'pypi_suffix': None,
        }, ignore_revision=True)

    _test(
        FORK_REPOSITORY, 'fork w/o vars/secrets, stable',
        {}, {}, {}, {
            'channel': FORK_REPOSITORY,
            'version': DEFAULT_VERSION_WITH_REVISION,
            'target_repo': FORK_REPOSITORY,
            'target_repo_token': None,
            'target_tag': DEFAULT_VERSION_WITH_REVISION,
            'pypi_project': None,
            'pypi_suffix': None,
        }, ignore_revision=True)
    _test(
        FORK_REPOSITORY, 'fork w/o vars/secrets, prerelease',
        {}, {}, {'prerelease': True}, {
            'channel': FORK_REPOSITORY,
            'version': DEFAULT_VERSION_WITH_REVISION,
            'target_repo': FORK_REPOSITORY,
            'target_repo_token': None,
            'target_tag': DEFAULT_VERSION_WITH_REVISION,
            'pypi_project': None,
            'pypi_suffix': None,
        }, ignore_revision=True)
    _test(
        FORK_REPOSITORY, 'fork w/o vars/secrets, nightly',
        {}, {}, {
            'prerelease': True,
            'source': 'nightly',
            'target': 'nightly',
        }, {
            'channel': f'{FORK_REPOSITORY}@nightly',
            'version': DEFAULT_VERSION_WITH_REVISION,
            'target_repo': FORK_REPOSITORY,
            'target_repo_token': None,
            'target_tag': 'nightly',
            'pypi_project': None,
            'pypi_suffix': None,
        }, ignore_revision=True)
    _test(
        FORK_REPOSITORY, 'fork w/o vars/secrets, master',
        {}, {}, {
            'prerelease': True,
            'source': 'master',
            'target': 'master',
        }, {
            'channel': f'{FORK_REPOSITORY}@master',
            'version': DEFAULT_VERSION_WITH_REVISION,
            'target_repo': FORK_REPOSITORY,
            'target_repo_token': None,
            'target_tag': 'master',
            'pypi_project': None,
            'pypi_suffix': None,
        }, ignore_revision=True)
    _test(
        FORK_REPOSITORY, 'fork w/o vars/secrets, revision',
        {}, {}, {'version': '123'}, {
            'channel': FORK_REPOSITORY,
            'version': f'{DEFAULT_VERSION[:10]}.123',
            'target_repo': FORK_REPOSITORY,
            'target_repo_token': None,
            'target_tag': f'{DEFAULT_VERSION[:10]}.123',
            'pypi_project': None,
            'pypi_suffix': None,
        })

    _test(
        FORK_REPOSITORY, 'fork w/ PUSH_VERSION_COMMIT, stable',
        {'PUSH_VERSION_COMMIT': '1'}, {}, {}, {
            'channel': FORK_REPOSITORY,
            'version': DEFAULT_VERSION,
            'target_repo': FORK_REPOSITORY,
            'target_repo_token': None,
            'target_tag': DEFAULT_VERSION,
            'pypi_project': None,
            'pypi_suffix': None,
        })
    _test(
        FORK_REPOSITORY, 'fork w/ PUSH_VERSION_COMMIT, prerelease',
        {'PUSH_VERSION_COMMIT': '1'}, {}, {'prerelease': True}, {
            'channel': FORK_REPOSITORY,
            'version': DEFAULT_VERSION_WITH_REVISION,
            'target_repo': FORK_REPOSITORY,
            'target_repo_token': None,
            'target_tag': DEFAULT_VERSION_WITH_REVISION,
            'pypi_project': None,
            'pypi_suffix': None,
        }, ignore_revision=True)

    _test(
        FORK_REPOSITORY, 'fork w/NIGHTLY_ARCHIVE_REPO_TOKEN, nightly', {
            'NIGHTLY_ARCHIVE_REPO': f'{FORK_ORG}/yt-dlp-nightly-builds',
            'PYPI_PROJECT': 'yt-dlp-test',
        }, {
            'NIGHTLY_ARCHIVE_REPO_TOKEN': '1',
        }, {
            'source': f'{FORK_ORG}/yt-dlp-nightly-builds',
            'target': 'nightly',
            'prerelease': True,
        }, {
            'channel': f'{FORK_ORG}/yt-dlp-nightly-builds',
            'version': DEFAULT_VERSION_WITH_REVISION,
            'target_repo': f'{FORK_ORG}/yt-dlp-nightly-builds',
            'target_repo_token': 'NIGHTLY_ARCHIVE_REPO_TOKEN',
            'target_tag': DEFAULT_VERSION_WITH_REVISION,
            'pypi_project': None,
            'pypi_suffix': None,
        }, ignore_revision=True)
    _test(
        FORK_REPOSITORY, 'fork w/MASTER_ARCHIVE_REPO_TOKEN, master', {
            'MASTER_ARCHIVE_REPO': f'{FORK_ORG}/yt-dlp-master-builds',
            'MASTER_PYPI_PROJECT': 'yt-dlp-test',
            'MASTER_PYPI_SUFFIX': 'dev',
        }, {
            'MASTER_ARCHIVE_REPO_TOKEN': '1',
        }, {
            'source': f'{FORK_ORG}/yt-dlp-master-builds',
            'target': 'master',
            'prerelease': True,
        }, {
            'channel': f'{FORK_ORG}/yt-dlp-master-builds',
            'version': DEFAULT_VERSION_WITH_REVISION,
            'target_repo': f'{FORK_ORG}/yt-dlp-master-builds',
            'target_repo_token': 'MASTER_ARCHIVE_REPO_TOKEN',
            'target_tag': DEFAULT_VERSION_WITH_REVISION,
            'pypi_project': 'yt-dlp-test',
            'pypi_suffix': 'dev',
        }, ignore_revision=True)

    _test(
        FORK_REPOSITORY, 'fork, non-numeric tag',
        {}, {}, {'source': 'experimental'}, {
            'channel': f'{FORK_REPOSITORY}@experimental',
            'version': DEFAULT_VERSION_WITH_REVISION,
            'target_repo': FORK_REPOSITORY,
            'target_repo_token': None,
            'target_tag': 'experimental',
            'pypi_project': None,
            'pypi_suffix': None,
        }, ignore_revision=True)
    _test(
        FORK_REPOSITORY, 'fork, non-numeric tag, updates to stable',
        {}, {}, {
            'prerelease': True,
            'source': 'stable',
            'target': 'experimental',
        }, {
            'channel': 'stable',
            'version': DEFAULT_VERSION_WITH_REVISION,
            'target_repo': FORK_REPOSITORY,
            'target_repo_token': None,
            'target_tag': 'experimental',
            'pypi_project': None,
            'pypi_suffix': None,
        }, ignore_revision=True)
@@ -1,39 +0,0 @@
[flake8]
exclude = build,venv,.tox,.git,.pytest_cache
ignore = E402,E501,E731,E741,W503
max_line_length = 120
per_file_ignores =
  devscripts/lazy_load_template.py: F401


[autoflake]
ignore-init-module-imports = true
ignore-pass-after-docstring = true
remove-all-unused-imports = true
remove-duplicate-keys = true
remove-unused-variables = true


[tox:tox]
skipsdist = true
envlist = py{39,310,311,312,313},pypy310
skip_missing_interpreters = true

[testenv]  # tox
deps =
    pytest
commands = pytest {posargs:"-m not download"}
passenv = HOME  # For test_compat_expanduser
setenv =
    # PYTHONWARNINGS = error  # Catches PIP's warnings too


[isort]
py_version = 39
multi_line_output = VERTICAL_HANGING_INDENT
line_length = 80
reverse_relative = true
ensure_newline_before_comments = true
include_trailing_comma = true
known_first_party =
    test
@@ -0,0 +1,235 @@
#!/usr/bin/env python3

# Allow direct execution
import os
import sys

sys.path.insert(0, os.path.dirname(os.path.dirname(os.path.abspath(__file__))))


import datetime as dt
import json
import math
import re
import unittest

from yt_dlp.utils.jslib import devalue


TEST_CASES_EQUALS = [{
    'name': 'int',
    'unparsed': [-42],
    'parsed': -42,
}, {
    'name': 'str',
    'unparsed': ['woo!!!'],
    'parsed': 'woo!!!',
}, {
    'name': 'Number',
    'unparsed': [['Object', 42]],
    'parsed': 42,
}, {
    'name': 'String',
    'unparsed': [['Object', 'yar']],
    'parsed': 'yar',
}, {
    'name': 'Infinity',
    'unparsed': -4,
    'parsed': math.inf,
}, {
    'name': 'negative Infinity',
    'unparsed': -5,
    'parsed': -math.inf,
}, {
    'name': 'negative zero',
    'unparsed': -6,
    'parsed': -0.0,
}, {
    'name': 'RegExp',
    'unparsed': [['RegExp', 'regexp', 'gim']],  # XXX: flags are ignored
    'parsed': re.compile('regexp'),
}, {
    'name': 'Date',
    'unparsed': [['Date', '2001-09-09T01:46:40.000Z']],
    'parsed': dt.datetime.fromtimestamp(1e9, tz=dt.timezone.utc),
}, {
    'name': 'Array',
    'unparsed': [[1, 2, 3], 'a', 'b', 'c'],
    'parsed': ['a', 'b', 'c'],
}, {
    'name': 'Array (empty)',
    'unparsed': [[]],
    'parsed': [],
}, {
    'name': 'Array (sparse)',
    'unparsed': [[-2, 1, -2], 'b'],
    'parsed': [None, 'b', None],
}, {
    'name': 'Object',
    'unparsed': [{'foo': 1, 'x-y': 2}, 'bar', 'z'],
    'parsed': {'foo': 'bar', 'x-y': 'z'},
}, {
    'name': 'Set',
    'unparsed': [['Set', 1, 2, 3], 1, 2, 3],
    'parsed': [1, 2, 3],
}, {
    'name': 'Map',
    'unparsed': [['Map', 1, 2], 'a', 'b'],
    'parsed': [['a', 'b']],
}, {
    'name': 'BigInt',
    'unparsed': [['BigInt', '1']],
    'parsed': 1,
}, {
    'name': 'Uint8Array',
    'unparsed': [['Uint8Array', 'AQID']],
    'parsed': [1, 2, 3],
}, {
    'name': 'ArrayBuffer',
    'unparsed': [['ArrayBuffer', 'AQID']],
    'parsed': [1, 2, 3],
}, {
    'name': 'str (repetition)',
    'unparsed': [[1, 1], 'a string'],
    'parsed': ['a string', 'a string'],
}, {
    'name': 'None (repetition)',
    'unparsed': [[1, 1], None],
    'parsed': [None, None],
}, {
    'name': 'dict (repetition)',
    'unparsed': [[1, 1], {}],
    'parsed': [{}, {}],
}, {
    'name': 'Object without prototype',
    'unparsed': [['null']],
    'parsed': {},
}, {
    'name': 'cross-realm POJO',
    'unparsed': [{}],
    'parsed': {},
}]

TEST_CASES_IS = [{
    'name': 'bool',
    'unparsed': [True],
    'parsed': True,
}, {
    'name': 'Boolean',
    'unparsed': [['Object', False]],
    'parsed': False,
}, {
    'name': 'undefined',
    'unparsed': -1,
    'parsed': None,
}, {
    'name': 'null',
    'unparsed': [None],
    'parsed': None,
}, {
    'name': 'NaN',
    'unparsed': -3,
    'parsed': math.nan,
}]

TEST_CASES_INVALID = [{
    'name': 'empty string',
    'unparsed': '',
    'error': ValueError,
    'pattern': r'expected int or list as input',
}, {
    'name': 'hole',
    'unparsed': -2,
    'error': ValueError,
    'pattern': r'invalid integer input',
}, {
    'name': 'string',
    'unparsed': 'hello',
    'error': ValueError,
    'pattern': r'expected int or list as input',
}, {
    'name': 'number',
    'unparsed': 42,
    'error': ValueError,
    'pattern': r'invalid integer input',
}, {
    'name': 'boolean',
    'unparsed': True,
    'error': ValueError,
    'pattern': r'expected int or list as input',
}, {
    'name': 'null',
    'unparsed': None,
    'error': ValueError,
    'pattern': r'expected int or list as input',
}, {
    'name': 'object',
    'unparsed': {},
    'error': ValueError,
    'pattern': r'expected int or list as input',
}, {
    'name': 'empty array',
    'unparsed': [],
    'error': ValueError,
    'pattern': r'expected a non-empty list as input',
}, {
    'name': 'Python negative indexing',
    'unparsed': [[1, 2, 3, 4, 5, 6, 7, -7], 1, 2, 3, 4, 5, 6, 7],
    'error': IndexError,
    'pattern': r'invalid index: -7',
}]


class TestDevalue(unittest.TestCase):
    def test_devalue_parse_equals(self):
        for tc in TEST_CASES_EQUALS:
            self.assertEqual(devalue.parse(tc['unparsed']), tc['parsed'], tc['name'])

    def test_devalue_parse_is(self):
        for tc in TEST_CASES_IS:
            self.assertIs(devalue.parse(tc['unparsed']), tc['parsed'], tc['name'])

    def test_devalue_parse_invalid(self):
        for tc in TEST_CASES_INVALID:
            with self.assertRaisesRegex(tc['error'], tc['pattern'], msg=tc['name']):
                devalue.parse(tc['unparsed'])

    def test_devalue_parse_cyclical(self):
        name = 'Map (cyclical)'
        result = devalue.parse([['Map', 1, 0], 'self'])
        self.assertEqual(result[0][0], 'self', name)
        self.assertIs(result, result[0][1], name)

        name = 'Set (cyclical)'
        result = devalue.parse([['Set', 0, 1], 42])
        self.assertEqual(result[1], 42, name)
        self.assertIs(result, result[0], name)

        result = devalue.parse([[0]])
        self.assertIs(result, result[0], 'Array (cyclical)')

        name = 'Object (cyclical)'
        result = devalue.parse([{'self': 0}])
        self.assertIs(result, result['self'], name)

        name = 'Object with null prototype (cyclical)'
        result = devalue.parse([['null', 'self', 0]])
        self.assertIs(result, result['self'], name)

        name = 'Objects (cyclical)'
        result = devalue.parse([[1, 2], {'second': 2}, {'first': 1}])
        self.assertIs(result[0], result[1]['first'], name)
        self.assertIs(result[1], result[0]['second'], name)

    def test_devalue_parse_revivers(self):
        self.assertEqual(
            devalue.parse([['indirect', 1], {'a': 2}, 'b'], revivers={'indirect': lambda x: x}),
            {'a': 'b'}, 'revivers (indirect)')

        self.assertEqual(
            devalue.parse([['parse', 1], '{"a":0}'], revivers={'parse': lambda x: json.loads(x)}),
            {'a': 0}, 'revivers (parse)')


if __name__ == '__main__':
    unittest.main()
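
For orientation, a few parse calls drawn directly from the cases above; the comments show the expected results:

    from yt_dlp.utils.jslib import devalue

    devalue.parse([{'foo': 1, 'x-y': 2}, 'bar', 'z'])  # {'foo': 'bar', 'x-y': 'z'}
    devalue.parse([[1, 2, 3], 'a', 'b', 'c'])          # ['a', 'b', 'c']
    devalue.parse([['BigInt', '1']])                   # 1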
@@ -0,0 +1,71 @@
import collections

import pytest

from yt_dlp import YoutubeDL
from yt_dlp.cookies import YoutubeDLCookieJar
from yt_dlp.extractor.common import InfoExtractor
from yt_dlp.extractor.youtube.pot._provider import IEContentProviderLogger
from yt_dlp.extractor.youtube.pot.provider import PoTokenRequest, PoTokenContext
from yt_dlp.utils.networking import HTTPHeaderDict


class MockLogger(IEContentProviderLogger):

    log_level = IEContentProviderLogger.LogLevel.TRACE

    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
        self.messages = collections.defaultdict(list)

    def trace(self, message: str):
        self.messages['trace'].append(message)

    def debug(self, message: str):
        self.messages['debug'].append(message)

    def info(self, message: str):
        self.messages['info'].append(message)

    def warning(self, message: str, *, once=False):
        self.messages['warning'].append(message)

    def error(self, message: str):
        self.messages['error'].append(message)


@pytest.fixture
def ie() -> InfoExtractor:
    ydl = YoutubeDL()
    return ydl.get_info_extractor('Youtube')


@pytest.fixture
def logger() -> MockLogger:
    return MockLogger()


@pytest.fixture()
def pot_request() -> PoTokenRequest:
    return PoTokenRequest(
        context=PoTokenContext.GVS,
        innertube_context={'client': {'clientName': 'WEB'}},
        innertube_host='youtube.com',
        session_index=None,
        player_url=None,
        is_authenticated=False,
        video_webpage=None,

        visitor_data='example-visitor-data',
        data_sync_id='example-data-sync-id',
        video_id='example-video-id',

        request_cookiejar=YoutubeDLCookieJar(),
        request_proxy=None,
        request_headers=HTTPHeaderDict(),
        request_timeout=None,
        request_source_address=None,
        request_verify_tls=True,

        bypass_cache=False,
    )
@ -0,0 +1,117 @@
|
||||
import threading
|
||||
import time
|
||||
from collections import OrderedDict
|
||||
import pytest
|
||||
from yt_dlp.extractor.youtube.pot._provider import IEContentProvider, BuiltinIEContentProvider
|
||||
from yt_dlp.utils import bug_reports_message
|
||||
from yt_dlp.extractor.youtube.pot._builtin.memory_cache import MemoryLRUPCP, memorylru_preference, initialize_global_cache
|
||||
from yt_dlp.version import __version__
|
||||
from yt_dlp.extractor.youtube.pot._registry import _pot_cache_providers, _pot_memory_cache
|
||||
|
||||
|
||||
class TestMemoryLRUPCS:
|
||||
|
||||
def test_base_type(self):
|
||||
assert issubclass(MemoryLRUPCP, IEContentProvider)
|
||||
assert issubclass(MemoryLRUPCP, BuiltinIEContentProvider)
|
||||
|
||||
@pytest.fixture
|
||||
def pcp(self, ie, logger) -> MemoryLRUPCP:
        return MemoryLRUPCP(ie, logger, {}, initialize_cache=lambda max_size: (OrderedDict(), threading.Lock(), max_size))

    def test_is_registered(self):
        assert _pot_cache_providers.value.get('MemoryLRU') == MemoryLRUPCP

    def test_initialization(self, pcp):
        assert pcp.PROVIDER_NAME == 'memory'
        assert pcp.PROVIDER_VERSION == __version__
        assert pcp.BUG_REPORT_MESSAGE == bug_reports_message(before='')
        assert pcp.is_available()

    def test_store_and_get(self, pcp):
        pcp.store('key1', 'value1', int(time.time()) + 60)
        assert pcp.get('key1') == 'value1'
        assert len(pcp.cache) == 1

    def test_store_ignore_expired(self, pcp):
        pcp.store('key1', 'value1', int(time.time()) - 1)
        assert len(pcp.cache) == 0
        assert pcp.get('key1') is None
        assert len(pcp.cache) == 0

    def test_store_override_existing_key(self, ie, logger):
        MAX_SIZE = 2
        pcp = MemoryLRUPCP(ie, logger, {}, initialize_cache=lambda max_size: (OrderedDict(), threading.Lock(), MAX_SIZE))
        pcp.store('key1', 'value1', int(time.time()) + 60)
        pcp.store('key2', 'value2', int(time.time()) + 60)
        assert len(pcp.cache) == 2
        pcp.store('key1', 'value2', int(time.time()) + 60)
        # Ensure that the override key gets added to the end of the cache instead of in the same position
        pcp.store('key3', 'value3', int(time.time()) + 60)
        assert pcp.get('key1') == 'value2'

    def test_store_ignore_expired_existing_key(self, pcp):
        pcp.store('key1', 'value2', int(time.time()) + 60)
        pcp.store('key1', 'value1', int(time.time()) - 1)
        assert len(pcp.cache) == 1
        assert pcp.get('key1') == 'value2'
        assert len(pcp.cache) == 1

    def test_get_key_expired(self, pcp):
        pcp.store('key1', 'value1', int(time.time()) + 60)
        assert pcp.get('key1') == 'value1'
        assert len(pcp.cache) == 1
        pcp.cache['key1'] = ('value1', int(time.time()) - 1)
        assert pcp.get('key1') is None
        assert len(pcp.cache) == 0

    def test_lru_eviction(self, ie, logger):
        MAX_SIZE = 2
        provider = MemoryLRUPCP(ie, logger, {}, initialize_cache=lambda max_size: (OrderedDict(), threading.Lock(), MAX_SIZE))
        provider.store('key1', 'value1', int(time.time()) + 5)
        provider.store('key2', 'value2', int(time.time()) + 5)
        assert len(provider.cache) == 2

        assert provider.get('key1') == 'value1'
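        # the get() above refreshes key1's recency, so key2 is now the least recently used entry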

        provider.store('key3', 'value3', int(time.time()) + 5)
        assert len(provider.cache) == 2
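        # storing key3 at capacity evicts the least recently used entry (key2)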

        assert provider.get('key2') is None

        provider.store('key4', 'value4', int(time.time()) + 5)
        assert len(provider.cache) == 2

        assert provider.get('key1') is None
        assert provider.get('key3') == 'value3'
        assert provider.get('key4') == 'value4'

    def test_delete(self, pcp):
        pcp.store('key1', 'value1', int(time.time()) + 5)
        assert len(pcp.cache) == 1
        assert pcp.get('key1') == 'value1'
        pcp.delete('key1')
        assert len(pcp.cache) == 0
        assert pcp.get('key1') is None

    def test_use_global_cache_default(self, ie, logger):
        pcp = MemoryLRUPCP(ie, logger, {})
        assert pcp.max_size == _pot_memory_cache.value['max_size'] == 25
        assert pcp.cache is _pot_memory_cache.value['cache']
        assert pcp.lock is _pot_memory_cache.value['lock']

        pcp2 = MemoryLRUPCP(ie, logger, {})
        assert pcp.max_size == pcp2.max_size == _pot_memory_cache.value['max_size'] == 25
        assert pcp.cache is pcp2.cache is _pot_memory_cache.value['cache']
        assert pcp.lock is pcp2.lock is _pot_memory_cache.value['lock']

    def test_fail_max_size_change_global(self, ie, logger):
        pcp = MemoryLRUPCP(ie, logger, {})
        assert pcp.max_size == _pot_memory_cache.value['max_size'] == 25
        with pytest.raises(ValueError, match='Cannot change max_size of initialized global memory cache'):
            initialize_global_cache(50)

        assert pcp.max_size == _pot_memory_cache.value['max_size'] == 25

    def test_memory_lru_preference(self, pcp, ie, pot_request):
        assert memorylru_preference(pcp, pot_request) == 10000
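        # the exact score is an implementation detail; it appears to be set high so the built-in
        # memory cache outranks external cache providers by default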
@ -0,0 +1,47 @@
import pytest
from yt_dlp.extractor.youtube.pot.provider import (
    PoTokenContext,

)

from yt_dlp.extractor.youtube.pot.utils import get_webpo_content_binding, ContentBindingType


class TestGetWebPoContentBinding:

    @pytest.mark.parametrize('client_name, context, is_authenticated, expected', [
        *[(client, context, is_authenticated, expected) for client in [
            'WEB', 'MWEB', 'TVHTML5', 'WEB_EMBEDDED_PLAYER', 'WEB_CREATOR', 'TVHTML5_SIMPLY_EMBEDDED_PLAYER', 'TVHTML5_SIMPLY']
            for context, is_authenticated, expected in [
                (PoTokenContext.GVS, False, ('example-visitor-data', ContentBindingType.VISITOR_DATA)),
                (PoTokenContext.PLAYER, False, ('example-video-id', ContentBindingType.VIDEO_ID)),
                (PoTokenContext.SUBS, False, ('example-video-id', ContentBindingType.VIDEO_ID)),
                (PoTokenContext.GVS, True, ('example-data-sync-id', ContentBindingType.DATASYNC_ID)),
            ]],
        ('WEB_REMIX', PoTokenContext.GVS, False, ('example-visitor-data', ContentBindingType.VISITOR_DATA)),
        ('WEB_REMIX', PoTokenContext.PLAYER, False, ('example-visitor-data', ContentBindingType.VISITOR_DATA)),
        ('ANDROID', PoTokenContext.GVS, False, (None, None)),
        ('IOS', PoTokenContext.GVS, False, (None, None)),
    ])
    def test_get_webpo_content_binding(self, pot_request, client_name, context, is_authenticated, expected):
        pot_request.innertube_context['client']['clientName'] = client_name
        pot_request.context = context
        pot_request.is_authenticated = is_authenticated
        assert get_webpo_content_binding(pot_request) == expected

    def test_extract_visitor_id(self, pot_request):
        pot_request.visitor_data = 'CgsxMjNhYmNYWVpfLSiA4s%2DqBg%3D%3D'
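        # visitor_data is a URL-encoded base64 protobuf blob; the visitor id ('123abcXYZ_-')
        # is expected to be extracted from it when bind_to_visitor_id is enabled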
        assert get_webpo_content_binding(pot_request, bind_to_visitor_id=True) == ('123abcXYZ_-', ContentBindingType.VISITOR_ID)

    def test_invalid_visitor_id(self, pot_request):
        # visitor id not alphanumeric (i.e. protobuf extraction failed)
        pot_request.visitor_data = 'CggxMjM0NTY3OCiA4s-qBg%3D%3D'
        assert get_webpo_content_binding(pot_request, bind_to_visitor_id=True) == (pot_request.visitor_data, ContentBindingType.VISITOR_DATA)

    def test_no_visitor_id(self, pot_request):
        pot_request.visitor_data = 'KIDiz6oG'
        assert get_webpo_content_binding(pot_request, bind_to_visitor_id=True) == (pot_request.visitor_data, ContentBindingType.VISITOR_DATA)

    def test_invalid_base64(self, pot_request):
        pot_request.visitor_data = 'invalid-base64'
        assert get_webpo_content_binding(pot_request, bind_to_visitor_id=True) == (pot_request.visitor_data, ContentBindingType.VISITOR_DATA)
@ -0,0 +1,92 @@
import pytest

from yt_dlp.extractor.youtube.pot._provider import IEContentProvider, BuiltinIEContentProvider
from yt_dlp.extractor.youtube.pot.cache import CacheProviderWritePolicy
from yt_dlp.utils import bug_reports_message
from yt_dlp.extractor.youtube.pot.provider import (
    PoTokenRequest,
    PoTokenContext,

)
from yt_dlp.version import __version__

from yt_dlp.extractor.youtube.pot._builtin.webpo_cachespec import WebPoPCSP
from yt_dlp.extractor.youtube.pot._registry import _pot_pcs_providers


@pytest.fixture()
def pot_request(pot_request) -> PoTokenRequest:
    pot_request.visitor_data = 'CgsxMjNhYmNYWVpfLSiA4s%2DqBg%3D%3D'  # visitor_id=123abcXYZ_-
    return pot_request


class TestWebPoPCSP:
    def test_base_type(self):
        assert issubclass(WebPoPCSP, IEContentProvider)
        assert issubclass(WebPoPCSP, BuiltinIEContentProvider)

    def test_init(self, ie, logger):
        pcs = WebPoPCSP(ie=ie, logger=logger, settings={})
        assert pcs.PROVIDER_NAME == 'webpo'
        assert pcs.PROVIDER_VERSION == __version__
        assert pcs.BUG_REPORT_MESSAGE == bug_reports_message(before='')
        assert pcs.is_available()

    def test_is_registered(self):
        assert _pot_pcs_providers.value.get('WebPo') == WebPoPCSP

    @pytest.mark.parametrize('client_name, context, is_authenticated', [
        ('ANDROID', PoTokenContext.GVS, False),
        ('IOS', PoTokenContext.GVS, False),
        ('IOS', PoTokenContext.PLAYER, False),
    ])
    def test_not_supports(self, ie, logger, pot_request, client_name, context, is_authenticated):
        pcs = WebPoPCSP(ie=ie, logger=logger, settings={})
        pot_request.innertube_context['client']['clientName'] = client_name
        pot_request.context = context
        pot_request.is_authenticated = is_authenticated
        assert pcs.generate_cache_spec(pot_request) is None
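        # non-web-based clients are not handled by the WebPo spec provider, so no spec is generated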

    @pytest.mark.parametrize('client_name, context, is_authenticated, remote_host, source_address, request_proxy, expected', [
        *[(client, context, is_authenticated, remote_host, source_address, request_proxy, expected) for client in [
            'WEB', 'MWEB', 'TVHTML5', 'WEB_EMBEDDED_PLAYER', 'WEB_CREATOR', 'TVHTML5_SIMPLY_EMBEDDED_PLAYER', 'TVHTML5_SIMPLY']
            for context, is_authenticated, remote_host, source_address, request_proxy, expected in [
                (PoTokenContext.GVS, False, 'example-remote-host', 'example-source-address', 'example-request-proxy', {'t': 'webpo', 'ip': 'example-remote-host', 'sa': 'example-source-address', 'px': 'example-request-proxy', 'cb': '123abcXYZ_-', 'cbt': 'visitor_id'}),
                (PoTokenContext.PLAYER, False, 'example-remote-host', 'example-source-address', 'example-request-proxy', {'t': 'webpo', 'ip': 'example-remote-host', 'sa': 'example-source-address', 'px': 'example-request-proxy', 'cb': '123abcXYZ_-', 'cbt': 'video_id'}),
                (PoTokenContext.GVS, True, 'example-remote-host', 'example-source-address', 'example-request-proxy', {'t': 'webpo', 'ip': 'example-remote-host', 'sa': 'example-source-address', 'px': 'example-request-proxy', 'cb': 'example-data-sync-id', 'cbt': 'datasync_id'}),
            ]],
        ('WEB_REMIX', PoTokenContext.PLAYER, False, 'example-remote-host', 'example-source-address', 'example-request-proxy', {'t': 'webpo', 'ip': 'example-remote-host', 'sa': 'example-source-address', 'px': 'example-request-proxy', 'cb': '123abcXYZ_-', 'cbt': 'visitor_id'}),
        ('WEB', PoTokenContext.GVS, False, None, None, None, {'t': 'webpo', 'cb': '123abcXYZ_-', 'cbt': 'visitor_id', 'ip': None, 'sa': None, 'px': None}),
        ('TVHTML5', PoTokenContext.PLAYER, False, None, None, 'http://example.com', {'t': 'webpo', 'cb': '123abcXYZ_-', 'cbt': 'video_id', 'ip': None, 'sa': None, 'px': 'http://example.com'}),

    ])
    def test_generate_key_bindings(self, ie, logger, pot_request, client_name, context, is_authenticated, remote_host, source_address, request_proxy, expected):
        pcs = WebPoPCSP(ie=ie, logger=logger, settings={})
        pot_request.innertube_context['client']['clientName'] = client_name
        pot_request.context = context
        pot_request.is_authenticated = is_authenticated
        pot_request.innertube_context['client']['remoteHost'] = remote_host
        pot_request.request_source_address = source_address
        pot_request.request_proxy = request_proxy
        pot_request.video_id = '123abcXYZ_-'  # same as visitor id to test type

        assert pcs.generate_cache_spec(pot_request).key_bindings == expected

    def test_no_bind_visitor_id(self, ie, logger, pot_request):
        # Should not bind to visitor id if setting is set to False
        pcs = WebPoPCSP(ie=ie, logger=logger, settings={'bind_to_visitor_id': ['false']})
        pot_request.innertube_context['client']['clientName'] = 'WEB'
        pot_request.context = PoTokenContext.GVS
        pot_request.is_authenticated = False
        assert pcs.generate_cache_spec(pot_request).key_bindings == {'t': 'webpo', 'ip': None, 'sa': None, 'px': None, 'cb': 'CgsxMjNhYmNYWVpfLSiA4s%2DqBg%3D%3D', 'cbt': 'visitor_data'}
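        # with visitor id binding disabled, 'cb' falls back to the raw visitor_data blob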

    def test_default_ttl(self, ie, logger, pot_request):
        pcs = WebPoPCSP(ie=ie, logger=logger, settings={})
        assert pcs.generate_cache_spec(pot_request).default_ttl == 6 * 60 * 60  # should default to 6 hours

    def test_write_policy(self, ie, logger, pot_request):
        pcs = WebPoPCSP(ie=ie, logger=logger, settings={})
        pot_request.context = PoTokenContext.GVS
        assert pcs.generate_cache_spec(pot_request).write_policy == CacheProviderWritePolicy.WRITE_ALL
        pot_request.context = PoTokenContext.PLAYER
        assert pcs.generate_cache_spec(pot_request).write_policy == CacheProviderWritePolicy.WRITE_FIRST
File diff suppressed because it is too large
@ -0,0 +1,629 @@
import pytest

from yt_dlp.extractor.youtube.pot._provider import IEContentProvider
from yt_dlp.cookies import YoutubeDLCookieJar
from yt_dlp.utils.networking import HTTPHeaderDict
from yt_dlp.extractor.youtube.pot.provider import (
    PoTokenRequest,
    PoTokenContext,
    ExternalRequestFeature,

)

from yt_dlp.extractor.youtube.pot.cache import (
    PoTokenCacheProvider,
    PoTokenCacheSpec,
    PoTokenCacheSpecProvider,
    CacheProviderWritePolicy,
)

import yt_dlp.extractor.youtube.pot.cache as cache

from yt_dlp.networking import Request
from yt_dlp.extractor.youtube.pot.provider import (
    PoTokenResponse,
    PoTokenProvider,
    PoTokenProviderRejectedRequest,
    provider_bug_report_message,
    register_provider,
    register_preference,
)

from yt_dlp.extractor.youtube.pot._registry import _pot_providers, _ptp_preferences, _pot_pcs_providers, _pot_cache_providers, _pot_cache_provider_preferences


class ExamplePTP(PoTokenProvider):
    PROVIDER_NAME = 'example'
    PROVIDER_VERSION = '0.0.1'
    BUG_REPORT_LOCATION = 'https://example.com/issues'

    _SUPPORTED_CLIENTS = ('WEB',)
    _SUPPORTED_CONTEXTS = (PoTokenContext.GVS, )

    _SUPPORTED_EXTERNAL_REQUEST_FEATURES = (
        ExternalRequestFeature.PROXY_SCHEME_HTTP,
        ExternalRequestFeature.PROXY_SCHEME_SOCKS5H,
    )

    def is_available(self) -> bool:
        return True

    def _real_request_pot(self, request: PoTokenRequest) -> PoTokenResponse:
        return PoTokenResponse('example-token', expires_at=123)


class ExampleCacheProviderPCP(PoTokenCacheProvider):

    PROVIDER_NAME = 'example'
    PROVIDER_VERSION = '0.0.1'
    BUG_REPORT_LOCATION = 'https://example.com/issues'

    def is_available(self) -> bool:
        return True

    def get(self, key: str):
        return 'example-cache'

    def store(self, key: str, value: str, expires_at: int):
        pass

    def delete(self, key: str):
        pass


class ExampleCacheSpecProviderPCSP(PoTokenCacheSpecProvider):

    PROVIDER_NAME = 'example'
    PROVIDER_VERSION = '0.0.1'
    BUG_REPORT_LOCATION = 'https://example.com/issues'

    def generate_cache_spec(self, request: PoTokenRequest):
        return PoTokenCacheSpec(
            key_bindings={'field': 'example-key'},
            default_ttl=60,
            write_policy=CacheProviderWritePolicy.WRITE_FIRST,
        )


class TestPoTokenProvider:

    def test_base_type(self):
        assert issubclass(PoTokenProvider, IEContentProvider)

    def test_create_provider_missing_fetch_method(self, ie, logger):
        class MissingMethodsPTP(PoTokenProvider):
            def is_available(self) -> bool:
                return True

        with pytest.raises(TypeError):
            MissingMethodsPTP(ie=ie, logger=logger, settings={})

    def test_create_provider_missing_available_method(self, ie, logger):
        class MissingMethodsPTP(PoTokenProvider):
            def _real_request_pot(self, request: PoTokenRequest) -> PoTokenResponse:
                raise PoTokenProviderRejectedRequest('Not implemented')

        with pytest.raises(TypeError):
            MissingMethodsPTP(ie=ie, logger=logger, settings={})

    def test_barebones_provider(self, ie, logger):
        class BarebonesProviderPTP(PoTokenProvider):
            def is_available(self) -> bool:
                return True

            def _real_request_pot(self, request: PoTokenRequest) -> PoTokenResponse:
                raise PoTokenProviderRejectedRequest('Not implemented')

        provider = BarebonesProviderPTP(ie=ie, logger=logger, settings={})
        assert provider.PROVIDER_NAME == 'BarebonesProvider'
        assert provider.PROVIDER_KEY == 'BarebonesProvider'
        assert provider.PROVIDER_VERSION == '0.0.0'
        assert provider.BUG_REPORT_MESSAGE == 'please report this issue to the provider developer at (developer has not provided a bug report location) .'
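        # without explicit declarations, the name/key default to the class name minus the
        # required 'PTP' suffix, and the version defaults to '0.0.0'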

    def test_example_provider_success(self, ie, logger, pot_request):
        provider = ExamplePTP(ie=ie, logger=logger, settings={})
        assert provider.PROVIDER_NAME == 'example'
        assert provider.PROVIDER_KEY == 'Example'
        assert provider.PROVIDER_VERSION == '0.0.1'
        assert provider.BUG_REPORT_MESSAGE == 'please report this issue to the provider developer at https://example.com/issues .'
        assert provider.is_available()

        response = provider.request_pot(pot_request)

        assert response.po_token == 'example-token'
        assert response.expires_at == 123

    def test_provider_unsupported_context(self, ie, logger, pot_request):
        provider = ExamplePTP(ie=ie, logger=logger, settings={})
        pot_request.context = PoTokenContext.PLAYER

        with pytest.raises(PoTokenProviderRejectedRequest):
            provider.request_pot(pot_request)

    def test_provider_unsupported_client(self, ie, logger, pot_request):
        provider = ExamplePTP(ie=ie, logger=logger, settings={})
        pot_request.innertube_context['client']['clientName'] = 'ANDROID'

        with pytest.raises(PoTokenProviderRejectedRequest):
            provider.request_pot(pot_request)

    def test_provider_unsupported_proxy_scheme(self, ie, logger, pot_request):
        provider = ExamplePTP(ie=ie, logger=logger, settings={})
        pot_request.request_proxy = 'socks4://example.com'

        with pytest.raises(
            PoTokenProviderRejectedRequest,
            match=r'External requests by "example" provider do not support proxy scheme "socks4"\. Supported proxy '
                  'schemes: http, socks5h',
        ):
            provider.request_pot(pot_request)

        pot_request.request_proxy = 'http://example.com'

        assert provider.request_pot(pot_request)
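        # http is among the provider's supported proxy schemes, so the same request now succeeds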

    def test_provider_ignore_external_request_features(self, ie, logger, pot_request):
        class InternalPTP(ExamplePTP):
            _SUPPORTED_EXTERNAL_REQUEST_FEATURES = None

        provider = InternalPTP(ie=ie, logger=logger, settings={})

        pot_request.request_proxy = 'socks5://example.com'
        assert provider.request_pot(pot_request)
        pot_request.request_source_address = '0.0.0.0'
        assert provider.request_pot(pot_request)

    def test_provider_unsupported_external_request_source_address(self, ie, logger, pot_request):
        class InternalPTP(ExamplePTP):
            _SUPPORTED_EXTERNAL_REQUEST_FEATURES = tuple()

        provider = InternalPTP(ie=ie, logger=logger, settings={})

        pot_request.request_source_address = None
        assert provider.request_pot(pot_request)

        pot_request.request_source_address = '0.0.0.0'
        with pytest.raises(
            PoTokenProviderRejectedRequest,
            match='External requests by "example" provider do not support setting source address',
        ):
            provider.request_pot(pot_request)

    def test_provider_supported_external_request_source_address(self, ie, logger, pot_request):
        class InternalPTP(ExamplePTP):
            _SUPPORTED_EXTERNAL_REQUEST_FEATURES = (
                ExternalRequestFeature.SOURCE_ADDRESS,
            )

        provider = InternalPTP(ie=ie, logger=logger, settings={})

        pot_request.request_source_address = None
        assert provider.request_pot(pot_request)

        pot_request.request_source_address = '0.0.0.0'
        assert provider.request_pot(pot_request)

    def test_provider_unsupported_external_request_tls_verification(self, ie, logger, pot_request):
        class InternalPTP(ExamplePTP):
            _SUPPORTED_EXTERNAL_REQUEST_FEATURES = tuple()

        provider = InternalPTP(ie=ie, logger=logger, settings={})

        pot_request.request_verify_tls = True
        assert provider.request_pot(pot_request)

        pot_request.request_verify_tls = False
        with pytest.raises(
            PoTokenProviderRejectedRequest,
            match='External requests by "example" provider do not support ignoring TLS certificate failures',
        ):
            provider.request_pot(pot_request)

    def test_provider_supported_external_request_tls_verification(self, ie, logger, pot_request):
        class InternalPTP(ExamplePTP):
            _SUPPORTED_EXTERNAL_REQUEST_FEATURES = (
                ExternalRequestFeature.DISABLE_TLS_VERIFICATION,
            )

        provider = InternalPTP(ie=ie, logger=logger, settings={})

        pot_request.request_verify_tls = True
        assert provider.request_pot(pot_request)

        pot_request.request_verify_tls = False
        assert provider.request_pot(pot_request)

    def test_provider_request_webpage(self, ie, logger, pot_request):
        provider = ExamplePTP(ie=ie, logger=logger, settings={})

        cookiejar = YoutubeDLCookieJar()
        pot_request.request_headers = HTTPHeaderDict({'User-Agent': 'example-user-agent'})
        pot_request.request_proxy = 'socks5://example-proxy.com'
        pot_request.request_cookiejar = cookiejar

        def mock_urlopen(request):
            return request

        ie._downloader.urlopen = mock_urlopen

        sent_request = provider._request_webpage(Request(
            'https://example.com',
        ), pot_request=pot_request)

        assert sent_request.url == 'https://example.com'
        assert sent_request.headers['User-Agent'] == 'example-user-agent'
        assert sent_request.proxies == {'all': 'socks5://example-proxy.com'}
        assert sent_request.extensions['cookiejar'] is cookiejar
        assert 'Requesting webpage' in logger.messages['info']

    def test_provider_request_webpage_override(self, ie, logger, pot_request):
        provider = ExamplePTP(ie=ie, logger=logger, settings={})

        cookiejar_request = YoutubeDLCookieJar()
        pot_request.request_headers = HTTPHeaderDict({'User-Agent': 'example-user-agent'})
        pot_request.request_proxy = 'socks5://example-proxy.com'
        pot_request.request_cookiejar = cookiejar_request

        def mock_urlopen(request):
            return request

        ie._downloader.urlopen = mock_urlopen

        sent_request = provider._request_webpage(Request(
            'https://example.com',
            headers={'User-Agent': 'override-user-agent-override'},
            proxies={'http': 'http://example-proxy-override.com'},
            extensions={'cookiejar': YoutubeDLCookieJar()},
        ), pot_request=pot_request, note='Custom requesting webpage')

        assert sent_request.url == 'https://example.com'
        assert sent_request.headers['User-Agent'] == 'override-user-agent-override'
        assert sent_request.proxies == {'http': 'http://example-proxy-override.com'}
        assert sent_request.extensions['cookiejar'] is not cookiejar_request
        assert 'Custom requesting webpage' in logger.messages['info']

    def test_provider_request_webpage_no_log(self, ie, logger, pot_request):
        provider = ExamplePTP(ie=ie, logger=logger, settings={})

        def mock_urlopen(request):
            return request

        ie._downloader.urlopen = mock_urlopen

        sent_request = provider._request_webpage(Request(
            'https://example.com',
        ), note=False)

        assert sent_request.url == 'https://example.com'
        assert 'info' not in logger.messages

    def test_provider_request_webpage_no_pot_request(self, ie, logger):
        provider = ExamplePTP(ie=ie, logger=logger, settings={})

        def mock_urlopen(request):
            return request

        ie._downloader.urlopen = mock_urlopen

        sent_request = provider._request_webpage(Request(
            'https://example.com',
        ), pot_request=None)

        assert sent_request.url == 'https://example.com'

    def test_get_config_arg(self, ie, logger):
        provider = ExamplePTP(ie=ie, logger=logger, settings={'abc': ['123D'], 'xyz': ['456a', '789B']})

        assert provider._configuration_arg('abc') == ['123d']
        assert provider._configuration_arg('abc', default=['default']) == ['123d']
        assert provider._configuration_arg('ABC', default=['default']) == ['default']
        assert provider._configuration_arg('abc', casesense=True) == ['123D']
        assert provider._configuration_arg('xyz', casesense=False) == ['456a', '789b']

    def test_require_class_end_with_suffix(self, ie, logger):
        class InvalidSuffix(PoTokenProvider):
            PROVIDER_NAME = 'invalid-suffix'

            def _real_request_pot(self, request: PoTokenRequest) -> PoTokenResponse:
                raise PoTokenProviderRejectedRequest('Not implemented')

            def is_available(self) -> bool:
                return True

        provider = InvalidSuffix(ie=ie, logger=logger, settings={})

        with pytest.raises(AssertionError):
            provider.PROVIDER_KEY  # noqa: B018


class TestPoTokenCacheProvider:

    def test_base_type(self):
        assert issubclass(PoTokenCacheProvider, IEContentProvider)

    def test_create_provider_missing_get_method(self, ie, logger):
        class MissingMethodsPCP(PoTokenCacheProvider):
            def store(self, key: str, value: str, expires_at: int):
                pass

            def delete(self, key: str):
                pass

            def is_available(self) -> bool:
                return True

        with pytest.raises(TypeError):
            MissingMethodsPCP(ie=ie, logger=logger, settings={})

    def test_create_provider_missing_store_method(self, ie, logger):
        class MissingMethodsPCP(PoTokenCacheProvider):
            def get(self, key: str):
                pass

            def delete(self, key: str):
                pass

            def is_available(self) -> bool:
                return True

        with pytest.raises(TypeError):
            MissingMethodsPCP(ie=ie, logger=logger, settings={})

    def test_create_provider_missing_delete_method(self, ie, logger):
        class MissingMethodsPCP(PoTokenCacheProvider):
            def get(self, key: str):
                pass

            def store(self, key: str, value: str, expires_at: int):
                pass

            def is_available(self) -> bool:
                return True

        with pytest.raises(TypeError):
            MissingMethodsPCP(ie=ie, logger=logger, settings={})

    def test_create_provider_missing_is_available_method(self, ie, logger):
        class MissingMethodsPCP(PoTokenCacheProvider):
            def get(self, key: str):
                pass

            def store(self, key: str, value: str, expires_at: int):
                pass

            def delete(self, key: str):
                pass

        with pytest.raises(TypeError):
            MissingMethodsPCP(ie=ie, logger=logger, settings={})

    def test_barebones_provider(self, ie, logger):
        class BarebonesProviderPCP(PoTokenCacheProvider):

            def is_available(self) -> bool:
                return True

            def get(self, key: str):
                return 'example-cache'

            def store(self, key: str, value: str, expires_at: int):
                pass

            def delete(self, key: str):
                pass

        provider = BarebonesProviderPCP(ie=ie, logger=logger, settings={})
        assert provider.PROVIDER_NAME == 'BarebonesProvider'
        assert provider.PROVIDER_KEY == 'BarebonesProvider'
        assert provider.PROVIDER_VERSION == '0.0.0'
        assert provider.BUG_REPORT_MESSAGE == 'please report this issue to the provider developer at (developer has not provided a bug report location) .'

    def test_create_provider_example(self, ie, logger):
        provider = ExampleCacheProviderPCP(ie=ie, logger=logger, settings={})
        assert provider.PROVIDER_NAME == 'example'
        assert provider.PROVIDER_KEY == 'ExampleCacheProvider'
        assert provider.PROVIDER_VERSION == '0.0.1'
        assert provider.BUG_REPORT_MESSAGE == 'please report this issue to the provider developer at https://example.com/issues .'
        assert provider.is_available()

    def test_get_config_arg(self, ie, logger):
        provider = ExampleCacheProviderPCP(ie=ie, logger=logger, settings={'abc': ['123D'], 'xyz': ['456a', '789B']})
        assert provider._configuration_arg('abc') == ['123d']
        assert provider._configuration_arg('abc', default=['default']) == ['123d']
        assert provider._configuration_arg('ABC', default=['default']) == ['default']
        assert provider._configuration_arg('abc', casesense=True) == ['123D']
        assert provider._configuration_arg('xyz', casesense=False) == ['456a', '789b']

    def test_require_class_end_with_suffix(self, ie, logger):
        class InvalidSuffix(PoTokenCacheProvider):
            def get(self, key: str):
                return 'example-cache'

            def store(self, key: str, value: str, expires_at: int):
                pass

            def delete(self, key: str):
                pass

            def is_available(self) -> bool:
                return True

        provider = InvalidSuffix(ie=ie, logger=logger, settings={})

        with pytest.raises(AssertionError):
            provider.PROVIDER_KEY  # noqa: B018


class TestPoTokenCacheSpecProvider:

    def test_base_type(self):
        assert issubclass(PoTokenCacheSpecProvider, IEContentProvider)

    def test_create_provider_missing_supports_method(self, ie, logger):
        class MissingMethodsPCS(PoTokenCacheSpecProvider):
            pass

        with pytest.raises(TypeError):
            MissingMethodsPCS(ie=ie, logger=logger, settings={})

    def test_create_provider_barebones(self, ie, pot_request, logger):
        class BarebonesProviderPCSP(PoTokenCacheSpecProvider):
            def generate_cache_spec(self, request: PoTokenRequest):
                return PoTokenCacheSpec(
                    default_ttl=100,
                    key_bindings={},
                )

        provider = BarebonesProviderPCSP(ie=ie, logger=logger, settings={})
        assert provider.PROVIDER_NAME == 'BarebonesProvider'
        assert provider.PROVIDER_KEY == 'BarebonesProvider'
        assert provider.PROVIDER_VERSION == '0.0.0'
        assert provider.BUG_REPORT_MESSAGE == 'please report this issue to the provider developer at (developer has not provided a bug report location) .'
        assert provider.is_available()
        assert provider.generate_cache_spec(request=pot_request).default_ttl == 100
        assert provider.generate_cache_spec(request=pot_request).key_bindings == {}
        assert provider.generate_cache_spec(request=pot_request).write_policy == CacheProviderWritePolicy.WRITE_ALL

    def test_create_provider_example(self, ie, pot_request, logger):
        provider = ExampleCacheSpecProviderPCSP(ie=ie, logger=logger, settings={})
        assert provider.PROVIDER_NAME == 'example'
        assert provider.PROVIDER_KEY == 'ExampleCacheSpecProvider'
        assert provider.PROVIDER_VERSION == '0.0.1'
        assert provider.BUG_REPORT_MESSAGE == 'please report this issue to the provider developer at https://example.com/issues .'
        assert provider.is_available()
        assert provider.generate_cache_spec(pot_request)
        assert provider.generate_cache_spec(pot_request).key_bindings == {'field': 'example-key'}
        assert provider.generate_cache_spec(pot_request).default_ttl == 60
        assert provider.generate_cache_spec(pot_request).write_policy == CacheProviderWritePolicy.WRITE_FIRST

    def test_get_config_arg(self, ie, logger):
        provider = ExampleCacheSpecProviderPCSP(ie=ie, logger=logger, settings={'abc': ['123D'], 'xyz': ['456a', '789B']})

        assert provider._configuration_arg('abc') == ['123d']
        assert provider._configuration_arg('abc', default=['default']) == ['123d']
        assert provider._configuration_arg('ABC', default=['default']) == ['default']
        assert provider._configuration_arg('abc', casesense=True) == ['123D']
        assert provider._configuration_arg('xyz', casesense=False) == ['456a', '789b']

    def test_require_class_end_with_suffix(self, ie, logger):
        class InvalidSuffix(PoTokenCacheSpecProvider):
            def generate_cache_spec(self, request: PoTokenRequest):
                return None

        provider = InvalidSuffix(ie=ie, logger=logger, settings={})

        with pytest.raises(AssertionError):
            provider.PROVIDER_KEY  # noqa: B018


class TestPoTokenRequest:
    def test_copy_request(self, pot_request):
        copied_request = pot_request.copy()

        assert copied_request is not pot_request
        assert copied_request.context == pot_request.context
        assert copied_request.innertube_context == pot_request.innertube_context
        assert copied_request.innertube_context is not pot_request.innertube_context
        copied_request.innertube_context['client']['clientName'] = 'ANDROID'
        assert pot_request.innertube_context['client']['clientName'] != 'ANDROID'
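        # innertube_context is deep-copied, so mutating the copy must not affect the original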
        assert copied_request.innertube_host == pot_request.innertube_host
        assert copied_request.session_index == pot_request.session_index
        assert copied_request.player_url == pot_request.player_url
        assert copied_request.is_authenticated == pot_request.is_authenticated
        assert copied_request.visitor_data == pot_request.visitor_data
        assert copied_request.data_sync_id == pot_request.data_sync_id
        assert copied_request.video_id == pot_request.video_id
        assert copied_request.request_cookiejar is pot_request.request_cookiejar
        assert copied_request.request_proxy == pot_request.request_proxy
        assert copied_request.request_headers == pot_request.request_headers
        assert copied_request.request_headers is not pot_request.request_headers
        assert copied_request.request_timeout == pot_request.request_timeout
        assert copied_request.request_source_address == pot_request.request_source_address
        assert copied_request.request_verify_tls == pot_request.request_verify_tls
        assert copied_request.bypass_cache == pot_request.bypass_cache


def test_provider_bug_report_message(ie, logger):
    provider = ExamplePTP(ie=ie, logger=logger, settings={})
    assert provider.BUG_REPORT_MESSAGE == 'please report this issue to the provider developer at https://example.com/issues .'

    message = provider_bug_report_message(provider)
    assert message == '; please report this issue to the provider developer at https://example.com/issues .'

    message_before = provider_bug_report_message(provider, before='custom message!')
    assert message_before == 'custom message! Please report this issue to the provider developer at https://example.com/issues .'


def test_register_provider(ie):

    @register_provider
    class UnavailableProviderPTP(PoTokenProvider):
        def is_available(self) -> bool:
            return False

        def _real_request_pot(self, request: PoTokenRequest) -> PoTokenResponse:
            raise PoTokenProviderRejectedRequest('Not implemented')

    assert _pot_providers.value.get('UnavailableProvider') == UnavailableProviderPTP
    _pot_providers.value.pop('UnavailableProvider')


def test_register_pot_preference(ie):
    before = len(_ptp_preferences.value)

    @register_preference(ExamplePTP)
    def unavailable_preference(provider: PoTokenProvider, request: PoTokenRequest):
        return 1

    assert len(_ptp_preferences.value) == before + 1


def test_register_cache_provider(ie):

    @cache.register_provider
    class UnavailableCacheProviderPCP(PoTokenCacheProvider):
        def is_available(self) -> bool:
            return False

        def get(self, key: str):
            return 'example-cache'

        def store(self, key: str, value: str, expires_at: int):
            pass

        def delete(self, key: str):
            pass

    assert _pot_cache_providers.value.get('UnavailableCacheProvider') == UnavailableCacheProviderPCP
    _pot_cache_providers.value.pop('UnavailableCacheProvider')


def test_register_cache_provider_spec(ie):

    @cache.register_spec
    class UnavailableCacheProviderPCSP(PoTokenCacheSpecProvider):
        def is_available(self) -> bool:
            return False

        def generate_cache_spec(self, request: PoTokenRequest):
            return None

    assert _pot_pcs_providers.value.get('UnavailableCacheProvider') == UnavailableCacheProviderPCSP
    _pot_pcs_providers.value.pop('UnavailableCacheProvider')


def test_register_cache_provider_preference(ie):
    before = len(_pot_cache_provider_preferences.value)

    @cache.register_preference(ExampleCacheProviderPCP)
    def unavailable_preference(provider: PoTokenCacheProvider, request: PoTokenRequest):
        return 1

    assert len(_pot_cache_provider_preferences.value) == before + 1


def test_logger_log_level(logger):
    assert logger.LogLevel('INFO') == logger.LogLevel.INFO
    assert logger.LogLevel('debuG') == logger.LogLevel.DEBUG
    assert logger.LogLevel(10) == logger.LogLevel.DEBUG
    assert logger.LogLevel('UNKNOWN') == logger.LogLevel.INFO
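    # unrecognized log level names appear to fall back to INFO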
Binary file not shown.
@ -1,285 +1,100 @@
import functools
import re

from .common import InfoExtractor
from ..utils import (
    ISO639Utils,
    OnDemandPagedList,
    clean_html,
    determine_ext,
    float_or_none,
    int_or_none,
    join_nonempty,
    parse_duration,
    str_or_none,
    str_to_int,
    unified_strdate,
    url_or_none,
)
from ..utils.traversal import traverse_obj


class AdobeTVBaseIE(InfoExtractor):
    def _call_api(self, path, video_id, query, note=None):
        return self._download_json(
            'http://tv.adobe.com/api/v4/' + path,
            video_id, note, query=query)['data']

    def _parse_subtitles(self, video_data, url_key):
        subtitles = {}
        for translation in video_data.get('translations', []):
            vtt_path = translation.get(url_key)
            if not vtt_path:
                continue
            lang = translation.get('language_w3c') or ISO639Utils.long2short(translation['language_medium'])
            subtitles.setdefault(lang, []).append({
                'ext': 'vtt',
                'url': vtt_path,
            })
        return subtitles

    def _parse_video_data(self, video_data):
        video_id = str(video_data['id'])
        title = video_data['title']

        s3_extracted = False
        formats = []
        for source in video_data.get('videos', []):
            source_url = source.get('url')
            if not source_url:
                continue
            f = {
                'format_id': source.get('quality_level'),
                'fps': int_or_none(source.get('frame_rate')),
                'height': int_or_none(source.get('height')),
                'tbr': int_or_none(source.get('video_data_rate')),
                'width': int_or_none(source.get('width')),
                'url': source_url,
            }
            original_filename = source.get('original_filename')
            if original_filename:
                if not (f.get('height') and f.get('width')):
                    mobj = re.search(r'_(\d+)x(\d+)', original_filename)
                    if mobj:
                        f.update({
                            'height': int(mobj.group(2)),
                            'width': int(mobj.group(1)),
                        })
                if original_filename.startswith('s3://') and not s3_extracted:
                    formats.append({
                        'format_id': 'original',
                        'quality': 1,
                        'url': original_filename.replace('s3://', 'https://s3.amazonaws.com/'),
                    })
                    s3_extracted = True
            formats.append(f)

        return {
            'id': video_id,
            'title': title,
            'description': video_data.get('description'),
            'thumbnail': video_data.get('thumbnail'),
            'upload_date': unified_strdate(video_data.get('start_date')),
            'duration': parse_duration(video_data.get('duration')),
            'view_count': str_to_int(video_data.get('playcount')),
            'formats': formats,
            'subtitles': self._parse_subtitles(video_data, 'vtt'),
        }


class AdobeTVEmbedIE(AdobeTVBaseIE):
    IE_NAME = 'adobetv:embed'
    _VALID_URL = r'https?://tv\.adobe\.com/embed/\d+/(?P<id>\d+)'
    _TEST = {
        'url': 'https://tv.adobe.com/embed/22/4153',
        'md5': 'c8c0461bf04d54574fc2b4d07ac6783a',
        'info_dict': {
            'id': '4153',
            'ext': 'flv',
            'title': 'Creating Graphics Optimized for BlackBerry',
            'description': 'md5:eac6e8dced38bdaae51cd94447927459',
            'thumbnail': r're:https?://.*\.jpg$',
            'upload_date': '20091109',
            'duration': 377,
            'view_count': int,
        },
    }

    def _real_extract(self, url):
        video_id = self._match_id(url)

        video_data = self._call_api(
            'episode/' + video_id, video_id, {'disclosure': 'standard'})[0]
        return self._parse_video_data(video_data)


class AdobeTVIE(AdobeTVBaseIE):
class AdobeTVVideoIE(InfoExtractor):
    IE_NAME = 'adobetv'
    _VALID_URL = r'https?://tv\.adobe\.com/(?:(?P<language>fr|de|es|jp)/)?watch/(?P<show_urlname>[^/]+)/(?P<id>[^/]+)'

    _TEST = {
        'url': 'http://tv.adobe.com/watch/the-complete-picture-with-julieanne-kost/quick-tip-how-to-draw-a-circle-around-an-object-in-photoshop/',
        'md5': '9bc5727bcdd55251f35ad311ca74fa1e',
        'info_dict': {
            'id': '10981',
            'ext': 'mp4',
            'title': 'Quick Tip - How to Draw a Circle Around an Object in Photoshop',
            'description': 'md5:99ec318dc909d7ba2a1f2b038f7d2311',
            'thumbnail': r're:https?://.*\.jpg$',
            'upload_date': '20110914',
            'duration': 60,
            'view_count': int,
        },
    }

    def _real_extract(self, url):
        language, show_urlname, urlname = self._match_valid_url(url).groups()
        if not language:
            language = 'en'

        video_data = self._call_api(
            'episode/get', urlname, {
                'disclosure': 'standard',
                'language': language,
                'show_urlname': show_urlname,
                'urlname': urlname,
            })[0]
        return self._parse_video_data(video_data)


class AdobeTVPlaylistBaseIE(AdobeTVBaseIE):
    _PAGE_SIZE = 25

    def _fetch_page(self, display_id, query, page):
        page += 1
        query['page'] = page
        for element_data in self._call_api(
                self._RESOURCE, display_id, query, f'Download Page {page}'):
            yield self._process_data(element_data)

    def _extract_playlist_entries(self, display_id, query):
        return OnDemandPagedList(functools.partial(
            self._fetch_page, display_id, query), self._PAGE_SIZE)


class AdobeTVShowIE(AdobeTVPlaylistBaseIE):
    IE_NAME = 'adobetv:show'
    _VALID_URL = r'https?://tv\.adobe\.com/(?:(?P<language>fr|de|es|jp)/)?show/(?P<id>[^/]+)'

    _TEST = {
        'url': 'http://tv.adobe.com/show/the-complete-picture-with-julieanne-kost',
        'info_dict': {
            'id': '36',
            'title': 'The Complete Picture with Julieanne Kost',
            'description': 'md5:fa50867102dcd1aa0ddf2ab039311b27',
        },
        'playlist_mincount': 136,
    }
    _RESOURCE = 'episode'
    _process_data = AdobeTVBaseIE._parse_video_data

    def _real_extract(self, url):
        language, show_urlname = self._match_valid_url(url).groups()
        if not language:
            language = 'en'
        query = {
            'disclosure': 'standard',
            'language': language,
            'show_urlname': show_urlname,
        }

        show_data = self._call_api(
            'show/get', show_urlname, query)[0]

        return self.playlist_result(
            self._extract_playlist_entries(show_urlname, query),
            str_or_none(show_data.get('id')),
            show_data.get('show_name'),
            show_data.get('show_description'))


class AdobeTVChannelIE(AdobeTVPlaylistBaseIE):
    IE_NAME = 'adobetv:channel'
    _VALID_URL = r'https?://tv\.adobe\.com/(?:(?P<language>fr|de|es|jp)/)?channel/(?P<id>[^/]+)(?:/(?P<category_urlname>[^/]+))?'

    _TEST = {
        'url': 'http://tv.adobe.com/channel/development',
        'info_dict': {
            'id': 'development',
        },
        'playlist_mincount': 96,
    }
    _RESOURCE = 'show'

    def _process_data(self, show_data):
        return self.url_result(
            show_data['url'], 'AdobeTVShow', str_or_none(show_data.get('id')))

    def _real_extract(self, url):
        language, channel_urlname, category_urlname = self._match_valid_url(url).groups()
        if not language:
            language = 'en'
        query = {
            'channel_urlname': channel_urlname,
            'language': language,
        }
        if category_urlname:
            query['category_urlname'] = category_urlname

        return self.playlist_result(
            self._extract_playlist_entries(channel_urlname, query),
            channel_urlname)


class AdobeTVVideoIE(AdobeTVBaseIE):
    IE_NAME = 'adobetv:video'
    _VALID_URL = r'https?://video\.tv\.adobe\.com/v/(?P<id>\d+)'
    _EMBED_REGEX = [r'<iframe[^>]+src=[\'"](?P<url>(?:https?:)?//video\.tv\.adobe\.com/v/\d+[^"]+)[\'"]']

    _TEST = {
        # From https://helpx.adobe.com/acrobat/how-to/new-experience-acrobat-dc.html?set=acrobat--get-started--essential-beginners
        'url': 'https://video.tv.adobe.com/v/2456/',

    _EMBED_REGEX = [r'<iframe[^>]+src=["\'](?P<url>(?:https?:)?//video\.tv\.adobe\.com/v/\d+)']
    _TESTS = [{
        'url': 'https://video.tv.adobe.com/v/2456',
        'md5': '43662b577c018ad707a63766462b1e87',
        'info_dict': {
            'id': '2456',
            'ext': 'mp4',
            'title': 'New experience with Acrobat DC',
            'description': 'New experience with Acrobat DC',
            'duration': 248.667,
            'duration': 248.522,
            'thumbnail': r're:https?://images-tv\.adobe\.com/.+\.jpg',
        },
    }
    }, {
        'url': 'https://video.tv.adobe.com/v/3463980/adobe-acrobat',
        'info_dict': {
            'id': '3463980',
            'ext': 'mp4',
            'title': 'Adobe Acrobat: How to Customize the Toolbar for Faster PDF Editing',
            'description': 'md5:94368ab95ae24f9c1bee0cb346e03dc3',
            'duration': 97.514,
            'thumbnail': r're:https?://images-tv\.adobe\.com/.+\.jpg',
        },
    }]
    _WEBPAGE_TESTS = [{
        # https://video.tv.adobe.com/v/3442499
        'url': 'https://business.adobe.com/dx-fragments/summit/2025/marquees/S335/ondemand.live.html',
        'info_dict': {
            'id': '3442499',
            'ext': 'mp4',
            'title': 'S335 - Beyond Personalization: Creating Intent-Based Experiences at Scale',
            'description': 'Beyond Personalization: Creating Intent-Based Experiences at Scale',
            'duration': 2906.8,
            'thumbnail': r're:https?://images-tv\.adobe\.com/.+\.jpg',
        },
    }]

    def _real_extract(self, url):
        video_id = self._match_id(url)
        webpage = self._download_webpage(url, video_id)

        video_data = self._parse_json(self._search_regex(
            r'var\s+bridge\s*=\s*([^;]+);', webpage, 'bridged data'), video_id)
        title = video_data['title']
        video_data = self._search_json(
            r'var\s+bridge\s*=', webpage, 'bridged data', video_id)

        formats = []
        sources = video_data.get('sources') or []
        for source in sources:
            source_src = source.get('src')
            if not source_src:
                continue
            formats.append({
                'filesize': int_or_none(source.get('kilobytes') or None, invscale=1000),
                'format_id': join_nonempty(source.get('format'), source.get('label')),
                'height': int_or_none(source.get('height') or None),
                'tbr': int_or_none(source.get('bitrate') or None),
                'width': int_or_none(source.get('width') or None),
                'url': source_src,
            })
        for source in traverse_obj(video_data, (
            'sources', lambda _, v: v['format'] != 'playlist' and url_or_none(v['src']),
        )):
            source_url = self._proto_relative_url(source['src'])
            if determine_ext(source_url) == 'm3u8':
                fmts = self._extract_m3u8_formats(
                    source_url, video_id, 'mp4', m3u8_id='hls', fatal=False)
            else:
                fmts = [{'url': source_url}]

            for fmt in fmts:
                fmt.update(traverse_obj(source, {
                    'duration': ('duration', {float_or_none(scale=1000)}),
                    'filesize': ('kilobytes', {float_or_none(invscale=1000)}),
                    'format_id': (('format', 'label'), {str}, all, {lambda x: join_nonempty(*x)}),
                    'height': ('height', {int_or_none}),
                    'tbr': ('bitrate', {int_or_none}),
                    'width': ('width', {int_or_none}),
                }))
            formats.extend(fmts)

        # For both metadata and downloaded files the duration varies among
        # formats. I just pick the max one
        duration = max(filter(None, [
            float_or_none(source.get('duration'), scale=1000)
            for source in sources]))
        subtitles = {}
        for translation in traverse_obj(video_data, (
            'translations', lambda _, v: url_or_none(v['vttPath']),
        )):
            lang = translation.get('language_w3c') or ISO639Utils.long2short(translation.get('language_medium')) or 'und'
            subtitles.setdefault(lang, []).append({
                'ext': 'vtt',
                'url': self._proto_relative_url(translation['vttPath']),
            })

        return {
            'id': video_id,
            'formats': formats,
            'title': title,
            'description': video_data.get('description'),
            'thumbnail': video_data.get('video', {}).get('poster'),
            'duration': duration,
            'subtitles': self._parse_subtitles(video_data, 'vttPath'),
            'subtitles': subtitles,
            **traverse_obj(video_data, {
                'title': ('title', {clean_html}),
                'description': ('description', {clean_html}, filter),
                'thumbnail': ('video', 'poster', {self._proto_relative_url}, {url_or_none}),
            }),
        }
@ -1,150 +0,0 @@
from .common import InfoExtractor
from ..utils import (
    ExtractorError,
    float_or_none,
    int_or_none,
    parse_iso8601,
    parse_qs,
    try_get,
)


class ArkenaIE(InfoExtractor):
    _VALID_URL = r'''(?x)
                        https?://
                            (?:
                                video\.(?:arkena|qbrick)\.com/play2/embed/player\?|
                                play\.arkena\.com/(?:config|embed)/avp/v\d/player/media/(?P<id>[^/]+)/[^/]+/(?P<account_id>\d+)
                            )
                        '''
    # See https://support.arkena.com/display/PLAY/Ways+to+embed+your+video
    _EMBED_REGEX = [r'<iframe[^>]+src=(["\'])(?P<url>(?:https?:)?//play\.arkena\.com/embed/avp/.+?)\1']
    _TESTS = [{
        'url': 'https://video.qbrick.com/play2/embed/player?accountId=1034090&mediaId=d8ab4607-00090107-aab86310',
        'md5': '97f117754e5f3c020f5f26da4a44ebaf',
        'info_dict': {
            'id': 'd8ab4607-00090107-aab86310',
            'ext': 'mp4',
            'title': 'EM_HT20_117_roslund_v2.mp4',
            'timestamp': 1608285912,
            'upload_date': '20201218',
            'duration': 1429.162667,
            'subtitles': {
                'sv': 'count:3',
            },
        },
    }, {
        'url': 'https://play.arkena.com/embed/avp/v2/player/media/b41dda37-d8e7-4d3f-b1b5-9a9db578bdfe/1/129411',
        'only_matching': True,
    }, {
        'url': 'https://play.arkena.com/config/avp/v2/player/media/b41dda37-d8e7-4d3f-b1b5-9a9db578bdfe/1/129411/?callbackMethod=jQuery1111023664739129262213_1469227693893',
        'only_matching': True,
    }, {
        'url': 'http://play.arkena.com/config/avp/v1/player/media/327336/darkmatter/131064/?callbackMethod=jQuery1111002221189684892677_1469227595972',
        'only_matching': True,
    }, {
        'url': 'http://play.arkena.com/embed/avp/v1/player/media/327336/darkmatter/131064/',
        'only_matching': True,
    }, {
        'url': 'http://video.arkena.com/play2/embed/player?accountId=472718&mediaId=35763b3b-00090078-bf604299&pageStyling=styled',
        'only_matching': True,
    }]

    def _real_extract(self, url):
        mobj = self._match_valid_url(url)
        video_id = mobj.group('id')
        account_id = mobj.group('account_id')

        # Handle http://video.arkena.com/play2/embed/player URL
        if not video_id:
            qs = parse_qs(url)
            video_id = qs.get('mediaId', [None])[0]
            account_id = qs.get('accountId', [None])[0]
            if not video_id or not account_id:
                raise ExtractorError('Invalid URL', expected=True)

        media = self._download_json(
            f'https://video.qbrick.com/api/v1/public/accounts/{account_id}/medias/{video_id}',
            video_id, query={
                # https://video.qbrick.com/docs/api/examples/library-api.html
                'fields': 'asset/resources/*/renditions/*(height,id,language,links/*(href,mimeType),type,size,videos/*(audios/*(codec,sampleRate),bitrate,codec,duration,height,width),width),created,metadata/*(title,description),tags',
            })
        metadata = media.get('metadata') or {}
        title = metadata['title']

        duration = None
        formats = []
        thumbnails = []
        subtitles = {}
        for resource in media['asset']['resources']:
            for rendition in (resource.get('renditions') or []):
                rendition_type = rendition.get('type')
                for i, link in enumerate(rendition.get('links') or []):
                    href = link.get('href')
                    if not href:
                        continue
                    if rendition_type == 'image':
                        thumbnails.append({
                            'filesize': int_or_none(rendition.get('size')),
                            'height': int_or_none(rendition.get('height')),
                            'id': rendition.get('id'),
                            'url': href,
                            'width': int_or_none(rendition.get('width')),
                        })
                    elif rendition_type == 'subtitle':
                        subtitles.setdefault(rendition.get('language') or 'en', []).append({
                            'url': href,
                        })
                    elif rendition_type == 'video':
                        f = {
                            'filesize': int_or_none(rendition.get('size')),
                            'format_id': rendition.get('id'),
                            'url': href,
                        }
                        video = try_get(rendition, lambda x: x['videos'][i], dict)
                        if video:
                            if not duration:
                                duration = float_or_none(video.get('duration'))
                            f.update({
                                'height': int_or_none(video.get('height')),
                                'tbr': int_or_none(video.get('bitrate'), 1000),
                                'vcodec': video.get('codec'),
                                'width': int_or_none(video.get('width')),
                            })
                            audio = try_get(video, lambda x: x['audios'][0], dict)
                            if audio:
                                f.update({
                                    'acodec': audio.get('codec'),
                                    'asr': int_or_none(audio.get('sampleRate')),
                                })
                        formats.append(f)
                    elif rendition_type == 'index':
                        mime_type = link.get('mimeType')
                        if mime_type == 'application/smil+xml':
                            formats.extend(self._extract_smil_formats(
                                href, video_id, fatal=False))
                        elif mime_type == 'application/x-mpegURL':
                            formats.extend(self._extract_m3u8_formats(
                                href, video_id, 'mp4', 'm3u8_native',
                                m3u8_id='hls', fatal=False))
                        elif mime_type == 'application/hds+xml':
                            formats.extend(self._extract_f4m_formats(
                                href, video_id, f4m_id='hds', fatal=False))
                        elif mime_type == 'application/dash+xml':
                            formats.extend(self._extract_mpd_formats(
                                href, video_id, mpd_id='dash', fatal=False))
                        elif mime_type == 'application/vnd.ms-sstr+xml':
                            formats.extend(self._extract_ism_formats(
                                href, video_id, ism_id='mss', fatal=False))

        return {
            'id': video_id,
            'title': title,
            'description': metadata.get('description'),
            'timestamp': parse_iso8601(media.get('created')),
            'thumbnails': thumbnails,
            'subtitles': subtitles,
            'duration': duration,
            'tags': media.get('tags'),
            'formats': formats,
        }
@ -1,33 +0,0 @@
from .brightcove import BrightcoveNewBaseIE
from ..utils import extract_attributes


class BandaiChannelIE(BrightcoveNewBaseIE):
    IE_NAME = 'bandaichannel'
    _VALID_URL = r'https?://(?:www\.)?b-ch\.com/titles/(?P<id>\d+/\d+)'
    _TESTS = [{
        'url': 'https://www.b-ch.com/titles/514/001',
        'md5': 'a0f2d787baa5729bed71108257f613a4',
        'info_dict': {
            'id': '6128044564001',
            'ext': 'mp4',
            'title': 'メタルファイターMIKU 第1話',
            'timestamp': 1580354056,
            'uploader_id': '5797077852001',
            'upload_date': '20200130',
            'duration': 1387.733,
        },
        'params': {
            'skip_download': True,
        },
    }]

    def _real_extract(self, url):
        video_id = self._match_id(url)
        webpage = self._download_webpage(url, video_id)
        attrs = extract_attributes(self._search_regex(
            r'(<video-js[^>]+\bid="bcplayer"[^>]*>)', webpage, 'player'))
        bc = self._download_json(
            'https://pbifcd.b-ch.com/v1/playbackinfo/ST/70/' + attrs['data-info'],
            video_id, headers={'X-API-KEY': attrs['data-auth'].strip()})['bc']
        return self._parse_brightcove_metadata(bc, bc['id'])
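Reviewer note, not part of the diff: extract_attributes(), as used in the deleted _real_extract above, parses a single HTML start tag into a dict of its attributes. A sketch with made-up placeholder values:

from yt_dlp.utils import extract_attributes

# Hypothetical tag mimicking what the extractor scraped from the page
tag = '<video-js id="bcplayer" data-info="514/001" data-auth=" TOKEN ">'
attrs = extract_attributes(tag)
assert attrs['data-info'] == '514/001'
assert attrs['data-auth'].strip() == 'TOKEN'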
@ -1,91 +0,0 @@
from .common import InfoExtractor


class BellMediaIE(InfoExtractor):
    _VALID_URL = r'''(?x)https?://(?:www\.)?
        (?P<domain>
            (?:
                ctv|
                tsn|
                bnn(?:bloomberg)?|
                thecomedynetwork|
                discovery|
                discoveryvelocity|
                sciencechannel|
                investigationdiscovery|
                animalplanet|
                bravo|
                mtv|
                space|
                etalk|
                marilyn
            )\.ca|
            (?:much|cp24)\.com
        )/.*?(?:\b(?:vid(?:eoid)?|clipId)=|-vid|~|%7E|/(?:episode)?)(?P<id>[0-9]{6,})'''
    _TESTS = [{
        'url': 'https://www.bnnbloomberg.ca/video/david-cockfield-s-top-picks~1403070',
        'md5': '3e5b8e38370741d5089da79161646635',
        'info_dict': {
            'id': '1403070',
            'ext': 'flv',
            'title': 'David Cockfield\'s Top Picks',
            'description': 'md5:810f7f8c6a83ad5b48677c3f8e5bb2c3',
            'upload_date': '20180525',
            'timestamp': 1527288600,
            'season_id': '73997',
            'season': '2018',
            'thumbnail': 'http://images2.9c9media.com/image_asset/2018_5_25_baf30cbd-b28d-4a18-9903-4bb8713b00f5_PNG_956x536.jpg',
            'tags': [],
            'categories': ['ETFs'],
            'season_number': 8,
            'duration': 272.038,
            'series': 'Market Call Tonight',
        },
    }, {
        'url': 'http://www.thecomedynetwork.ca/video/player?vid=923582',
        'only_matching': True,
    }, {
        'url': 'http://www.tsn.ca/video/expectations-high-for-milos-raonic-at-us-open~939549',
        'only_matching': True,
    }, {
        'url': 'http://www.bnn.ca/video/berman-s-call-part-two-viewer-questions~939654',
        'only_matching': True,
    }, {
        'url': 'http://www.ctv.ca/YourMorning/Video/S1E6-Monday-August-29-2016-vid938009',
        'only_matching': True,
    }, {
        'url': 'http://www.much.com/shows/atmidnight/episode948007/tuesday-september-13-2016',
        'only_matching': True,
    }, {
        'url': 'http://www.much.com/shows/the-almost-impossible-gameshow/928979/episode-6',
        'only_matching': True,
    }, {
        'url': 'http://www.ctv.ca/DCs-Legends-of-Tomorrow/Video/S2E11-Turncoat-vid1051430',
        'only_matching': True,
    }, {
        'url': 'http://www.etalk.ca/video?videoid=663455',
        'only_matching': True,
    }, {
        'url': 'https://www.cp24.com/video?clipId=1982548',
        'only_matching': True,
    }]
    _DOMAINS = {
        'thecomedynetwork': 'comedy',
        'discoveryvelocity': 'discvel',
        'sciencechannel': 'discsci',
        'investigationdiscovery': 'invdisc',
        'animalplanet': 'aniplan',
        'etalk': 'ctv',
        'bnnbloomberg': 'bnn',
        'marilyn': 'ctv_marilyn',
    }

    def _real_extract(self, url):
        domain, video_id = self._match_valid_url(url).groups()
        domain = domain.split('.')[0]
        return {
            '_type': 'url_transparent',
            'id': video_id,
            'url': f'9c9media:{self._DOMAINS.get(domain, domain)}_web:{video_id}',
            'ie_key': 'NineCNineMedia',
        }
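Reviewer note, not part of the diff: the deleted extractor's domain remapping is easy to verify in isolation. A minimal standalone sketch of the 9c9media URL construction, with the mapping trimmed to two entries:

_DOMAINS = {'thecomedynetwork': 'comedy', 'bnnbloomberg': 'bnn'}

def nine_c_nine_url(domain, video_id):
    # mirror of the deleted _real_extract: strip the TLD, remap known brands,
    # and pass unknown domains through unchanged
    domain = domain.split('.')[0]
    return f'9c9media:{_DOMAINS.get(domain, domain)}_web:{video_id}'

assert nine_c_nine_url('thecomedynetwork.ca', '923582') == '9c9media:comedy_web:923582'
assert nine_c_nine_url('tsn.ca', '939549') == '9c9media:tsn_web:939549'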
@ -1,79 +1,47 @@
from .mtv import MTVServicesInfoExtractor
from ..utils import unified_strdate


class BetIE(MTVServicesInfoExtractor):
    _WORKING = False
    _VALID_URL = r'https?://(?:www\.)?bet\.com/(?:[^/]+/)+(?P<id>.+?)\.html'
    _TESTS = [
        {
            'url': 'http://www.bet.com/news/politics/2014/12/08/in-bet-exclusive-obama-talks-race-and-racism.html',
            'info_dict': {
                'id': '07e96bd3-8850-3051-b856-271b457f0ab8',
                'display_id': 'in-bet-exclusive-obama-talks-race-and-racism',
                'ext': 'flv',
                'title': 'A Conversation With President Obama',
                'description': 'President Obama urges persistence in confronting racism and bias.',
                'duration': 1534,
                'upload_date': '20141208',
                'thumbnail': r're:(?i)^https?://.*\.jpg$',
                'subtitles': {
                    'en': 'mincount:2',
                },
            },
            'params': {
                # rtmp download
                'skip_download': True,
            },
        },
        {
            'url': 'http://www.bet.com/video/news/national/2014/justice-for-ferguson-a-community-reacts.html',
            'info_dict': {
                'id': '9f516bf1-7543-39c4-8076-dd441b459ba9',
                'display_id': 'justice-for-ferguson-a-community-reacts',
                'ext': 'flv',
                'title': 'Justice for Ferguson: A Community Reacts',
                'description': 'A BET News special.',
                'duration': 1696,
                'upload_date': '20141125',
                'thumbnail': r're:(?i)^https?://.*\.jpg$',
                'subtitles': {
                    'en': 'mincount:2',
                },
            },
            'params': {
                # rtmp download
                'skip_download': True,
            },
        },
    ]

    _FEED_URL = 'http://feeds.mtvnservices.com/od/feed/bet-mrss-player'

    def _get_feed_query(self, uri):
        return {
            'uuid': uri,
        }

    def _extract_mgid(self, webpage):
        return self._search_regex(r'data-uri="([^"]+)', webpage, 'mgid')

    def _real_extract(self, url):
        display_id = self._match_id(url)

        webpage = self._download_webpage(url, display_id)
        mgid = self._extract_mgid(webpage)
        videos_info = self._get_videos_info(mgid)

        info_dict = videos_info['entries'][0]

        upload_date = unified_strdate(self._html_search_meta('date', webpage))
        description = self._html_search_meta('description', webpage)

        info_dict.update({
            'display_id': display_id,
            'description': description,
            'upload_date': upload_date,
        })

        return info_dict

from .mtv import MTVServicesBaseIE


class BetIE(MTVServicesBaseIE):
    _VALID_URL = r'https?://(?:www\.)?bet\.com/(?:video-clips|episodes)/(?P<id>[\da-z]{6})'
    _TESTS = [{
        'url': 'https://www.bet.com/video-clips/w9mk7v',
        'info_dict': {
            'id': '3022d121-d191-43fd-b5fb-b2c26f335497',
            'ext': 'mp4',
            'display_id': 'w9mk7v',
            'title': 'New Normal',
            'description': 'md5:d7898c124713b4646cecad9d16ff01f3',
            'duration': 30.08,
            'series': 'Tyler Perry\'s Sistas',
            'season': 'Season 0',
            'season_number': 0,
            'episode': 'Episode 0',
            'episode_number': 0,
            'timestamp': 1755269073,
            'upload_date': '20250815',
        },
        'params': {'skip_download': 'm3u8'},
    }, {
        'url': 'https://www.bet.com/episodes/nmce72/tyler-perry-s-sistas-heavy-is-the-crown-season-9-ep-5',
        'info_dict': {
            'id': '6427562b-3029-11f0-b405-16fff45bc035',
            'ext': 'mp4',
            'display_id': 'nmce72',
            'title': 'Heavy Is the Crown',
            'description': 'md5:1ed345d3157a50572d2464afcc7a652a',
            'channel': 'BET',
            'duration': 2550.0,
            'thumbnail': r're:https://images\.paramount\.tech/uri/mgid:arc:imageassetref',
            'series': 'Tyler Perry\'s Sistas',
            'season': 'Season 9',
            'season_number': 9,
            'episode': 'Episode 5',
            'episode_number': 5,
            'timestamp': 1755165600,
            'upload_date': '20250814',
            'release_timestamp': 1755129600,
            'release_date': '20250814',
        },
        'params': {'skip_download': 'm3u8'},
        'skip': 'Requires provider sign-in',
    }]
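Reviewer note, not part of the diff: the rewritten BetIE can be smoke-tested through the public yt-dlp API. A minimal sketch using the URL from the first test case above (requires network access and an installed yt-dlp):

import yt_dlp

# URL matches the new _VALID_URL: /video-clips/<6-char id>
with yt_dlp.YoutubeDL({'skip_download': True}) as ydl:
    info = ydl.extract_info('https://www.bet.com/video-clips/w9mk7v', download=False)
    print(info['id'], info.get('title'))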
@ -1,188 +0,0 @@
from .adobepass import AdobePassIE
from ..networking import HEADRequest
from ..utils import (
    extract_attributes,
    float_or_none,
    get_element_html_by_class,
    int_or_none,
    merge_dicts,
    parse_age_limit,
    remove_end,
    str_or_none,
    traverse_obj,
    unescapeHTML,
    unified_timestamp,
    update_url_query,
    url_or_none,
)


class BravoTVIE(AdobePassIE):
    _VALID_URL = r'https?://(?:www\.)?(?P<site>bravotv|oxygen)\.com/(?:[^/]+/)+(?P<id>[^/?#]+)'
    _TESTS = [{
        'url': 'https://www.bravotv.com/top-chef/season-16/episode-15/videos/the-top-chef-season-16-winner-is',
        'info_dict': {
            'id': '3923059',
            'ext': 'mp4',
            'title': 'The Top Chef Season 16 Winner Is...',
            'description': 'Find out who takes the title of Top Chef!',
            'upload_date': '20190314',
            'timestamp': 1552591860,
            'season_number': 16,
            'episode_number': 15,
            'series': 'Top Chef',
            'episode': 'The Top Chef Season 16 Winner Is...',
            'duration': 190.357,
            'season': 'Season 16',
            'thumbnail': r're:^https://.+\.jpg',
        },
        'params': {'skip_download': 'm3u8'},
    }, {
        'url': 'https://www.bravotv.com/top-chef/season-20/episode-1/london-calling',
        'info_dict': {
            'id': '9000234570',
            'ext': 'mp4',
            'title': 'London Calling',
            'description': 'md5:5af95a8cbac1856bd10e7562f86bb759',
            'upload_date': '20230310',
            'timestamp': 1678410000,
            'season_number': 20,
            'episode_number': 1,
            'series': 'Top Chef',
            'episode': 'London Calling',
            'duration': 3266.03,
            'season': 'Season 20',
            'chapters': 'count:7',
            'thumbnail': r're:^https://.+\.jpg',
            'age_limit': 14,
        },
        'params': {'skip_download': 'm3u8'},
        'skip': 'This video requires AdobePass MSO credentials',
    }, {
        'url': 'https://www.oxygen.com/in-ice-cold-blood/season-1/closing-night',
        'info_dict': {
            'id': '3692045',
            'ext': 'mp4',
            'title': 'Closing Night',
            'description': 'md5:3170065c5c2f19548d72a4cbc254af63',
            'upload_date': '20180401',
            'timestamp': 1522623600,
            'season_number': 1,
            'episode_number': 1,
            'series': 'In Ice Cold Blood',
            'episode': 'Closing Night',
            'duration': 2629.051,
            'season': 'Season 1',
            'chapters': 'count:6',
            'thumbnail': r're:^https://.+\.jpg',
            'age_limit': 14,
        },
        'params': {'skip_download': 'm3u8'},
        'skip': 'This video requires AdobePass MSO credentials',
    }, {
        'url': 'https://www.oxygen.com/in-ice-cold-blood/season-2/episode-16/videos/handling-the-horwitz-house-after-the-murder-season-2',
        'info_dict': {
            'id': '3974019',
            'ext': 'mp4',
            'title': '\'Handling The Horwitz House After The Murder (Season 2, Episode 16)',
            'description': 'md5:f9d638dd6946a1c1c0533a9c6100eae5',
            'upload_date': '20190617',
            'timestamp': 1560790800,
            'season_number': 2,
            'episode_number': 16,
            'series': 'In Ice Cold Blood',
            'episode': '\'Handling The Horwitz House After The Murder (Season 2, Episode 16)',
            'duration': 68.235,
            'season': 'Season 2',
            'thumbnail': r're:^https://.+\.jpg',
            'age_limit': 14,
        },
        'params': {'skip_download': 'm3u8'},
    }, {
        'url': 'https://www.bravotv.com/below-deck/season-3/ep-14-reunion-part-1',
        'only_matching': True,
    }]

    def _real_extract(self, url):
        site, display_id = self._match_valid_url(url).group('site', 'id')
        webpage = self._download_webpage(url, display_id)
        settings = self._search_json(
            r'<script[^>]+data-drupal-selector="drupal-settings-json"[^>]*>', webpage, 'settings', display_id)
        tve = extract_attributes(get_element_html_by_class('tve-video-deck-app', webpage) or '')
        query = {
            'manifest': 'm3u',
            'formats': 'm3u,mpeg4',
        }

        if tve:
            account_pid = tve.get('data-mpx-media-account-pid') or 'HNK2IC'
            account_id = tve['data-mpx-media-account-id']
            metadata = self._parse_json(
                tve.get('data-normalized-video', ''), display_id, fatal=False, transform_source=unescapeHTML)
            video_id = tve.get('data-guid') or metadata['guid']
            if tve.get('data-entitlement') == 'auth':
                auth = traverse_obj(settings, ('tve_adobe_auth', {dict})) or {}
                site = remove_end(site, 'tv')
                release_pid = tve['data-release-pid']
                resource = self._get_mvpd_resource(
                    tve.get('data-adobe-pass-resource-id') or auth.get('adobePassResourceId') or site,
                    tve['data-title'], release_pid, tve.get('data-rating'))
                query.update({
                    'switch': 'HLSServiceSecure',
                    'auth': self._extract_mvpd_auth(
                        url, release_pid, auth.get('adobePassRequestorId') or site, resource),
                })

        else:
            ls_playlist = traverse_obj(settings, ('ls_playlist', ..., {dict}), get_all=False) or {}
            account_pid = ls_playlist.get('mpxMediaAccountPid') or 'PHSl-B'
            account_id = ls_playlist['mpxMediaAccountId']
            video_id = ls_playlist['defaultGuid']
            metadata = traverse_obj(
                ls_playlist, ('videos', lambda _, v: v['guid'] == video_id, {dict}), get_all=False)

        tp_url = f'https://link.theplatform.com/s/{account_pid}/media/guid/{account_id}/{video_id}'
        tp_metadata = self._download_json(
            update_url_query(tp_url, {'format': 'preview'}), video_id, fatal=False)

        chapters = traverse_obj(tp_metadata, ('chapters', ..., {
            'start_time': ('startTime', {float_or_none(scale=1000)}),
            'end_time': ('endTime', {float_or_none(scale=1000)}),
        }))
        # prune pointless single chapters that span the entire duration from short videos
        if len(chapters) == 1 and not traverse_obj(chapters, (0, 'end_time')):
            chapters = None

        m3u8_url = self._request_webpage(HEADRequest(
            update_url_query(f'{tp_url}/stream.m3u8', query)), video_id, 'Checking m3u8 URL').url
        if 'mpeg_cenc' in m3u8_url:
            self.report_drm(video_id)
        formats, subtitles = self._extract_m3u8_formats_and_subtitles(m3u8_url, video_id, 'mp4', m3u8_id='hls')

        return {
            'id': video_id,
            'formats': formats,
            'subtitles': subtitles,
            'chapters': chapters,
            **merge_dicts(traverse_obj(tp_metadata, {
                'title': 'title',
                'description': 'description',
                'duration': ('duration', {float_or_none(scale=1000)}),
                'timestamp': ('pubDate', {float_or_none(scale=1000)}),
                'season_number': (('pl1$seasonNumber', 'nbcu$seasonNumber'), {int_or_none}),
                'episode_number': (('pl1$episodeNumber', 'nbcu$episodeNumber'), {int_or_none}),
                'series': (('pl1$show', 'nbcu$show'), (None, ...), {str}),
                'episode': (('title', 'pl1$episodeNumber', 'nbcu$episodeNumber'), {str_or_none}),
                'age_limit': ('ratings', ..., 'rating', {parse_age_limit}),
            }, get_all=False), traverse_obj(metadata, {
                'title': 'title',
                'description': 'description',
                'duration': ('durationInSeconds', {int_or_none}),
                'timestamp': ('airDate', {unified_timestamp}),
                'thumbnail': ('thumbnailUrl', {url_or_none}),
                'season_number': ('seasonNumber', {int_or_none}),
                'episode_number': ('episodeNumber', {int_or_none}),
                'episode': 'episodeTitle',
                'series': 'show',
            })),
        }
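Reviewer note, not part of the diff: the chapter pruning in the deleted _real_extract above is self-contained enough to run standalone. A sketch against the real yt_dlp.utils helpers (recent yt-dlp, where float_or_none supports partial application); the metadata dict is a made-up stand-in for thePlatform's response:

from yt_dlp.utils import float_or_none, traverse_obj

tp_metadata = {'chapters': [{'startTime': 0, 'endTime': None}]}
chapters = traverse_obj(tp_metadata, ('chapters', ..., {
    'start_time': ('startTime', {float_or_none(scale=1000)}),
    'end_time': ('endTime', {float_or_none(scale=1000)}),
}))
# a lone chapter with no end time spans the whole video and adds nothing
if len(chapters) == 1 and not traverse_obj(chapters, (0, 'end_time')):
    chapters = None
assert chapters is None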