Merge branch 'yt-dlp:master' into viu-indonesia-fix-6482-partial
Commit: 827821d6ca
.github/workflows/build.yml (104 changed lines)

@@ -107,10 +107,10 @@ jobs:
     runs-on: ubuntu-latest
     steps:
       - uses: actions/checkout@v4
-      - uses: actions/setup-python@v4
+      - uses: actions/setup-python@v5
        with:
          python-version: "3.10"
-      - uses: conda-incubator/setup-miniconda@v2
+      - uses: conda-incubator/setup-miniconda@v3
        with:
          miniforge-variant: Mambaforge
          use-mamba: true

@@ -121,16 +121,14 @@ jobs:
      - name: Install Requirements
        run: |
          sudo apt -y install zip pandoc man sed
-         reqs=$(mktemp)
-         cat > "$reqs" << EOF
+         cat > ./requirements.txt << EOF
          python=3.10.*
-         pyinstaller
-         cffi
          brotli-python
-         secretstorage
          EOF
-         sed -E '/^(brotli|secretstorage).*/d' requirements.txt >> "$reqs"
-         mamba create -n build --file "$reqs"
+         python devscripts/install_deps.py --print \
+           --exclude brotli --exclude brotlicffi \
+           --include secretstorage --include pyinstaller >> ./requirements.txt
+         mamba create -n build --file ./requirements.txt

      - name: Prepare
        run: |

@@ -144,9 +142,9 @@ jobs:
        run: |
          unset LD_LIBRARY_PATH # Harmful; set by setup-python
          conda activate build
-         python pyinst.py --onedir
+         python -m bundle.pyinstaller --onedir
          (cd ./dist/yt-dlp_linux && zip -r ../yt-dlp_linux.zip .)
-         python pyinst.py
+         python -m bundle.pyinstaller
          mv ./dist/yt-dlp_linux ./yt-dlp_linux
          mv ./dist/yt-dlp_linux.zip ./yt-dlp_linux.zip

@@ -164,13 +162,15 @@ jobs:
          done

      - name: Upload artifacts
-       uses: actions/upload-artifact@v3
+       uses: actions/upload-artifact@v4
        with:
+         name: build-${{ github.job }}
          path: |
            yt-dlp
            yt-dlp.tar.gz
            yt-dlp_linux
            yt-dlp_linux.zip
+         compression-level: 0

  linux_arm:
    needs: process

@@ -201,17 +201,18 @@ jobs:
          dockerRunArgs: --volume "${PWD}/repo:/repo"
          install: | # Installing Python 3.10 from the Deadsnakes repo raises errors
            apt update
-           apt -y install zlib1g-dev python3.8 python3.8-dev python3.8-distutils python3-pip
+           apt -y install zlib1g-dev libffi-dev python3.8 python3.8-dev python3.8-distutils python3-pip
            python3.8 -m pip install -U pip setuptools wheel
-           # Cannot access requirements.txt from the repo directory at this stage
-           python3.8 -m pip install -U Pyinstaller mutagen pycryptodomex websockets brotli certifi secretstorage
+           # Cannot access any files from the repo directory at this stage
+           python3.8 -m pip install -U Pyinstaller mutagen pycryptodomex websockets brotli certifi secretstorage cffi

          run: |
            cd repo
-           python3.8 -m pip install -U Pyinstaller secretstorage -r requirements.txt # Cached version may be out of date
+           python3.8 devscripts/install_deps.py -o --include build
+           python3.8 devscripts/install_deps.py --include pyinstaller --include secretstorage # Cached version may be out of date
            python3.8 devscripts/update-version.py -c "${{ inputs.channel }}" -r "${{ needs.process.outputs.origin }}" "${{ inputs.version }}"
            python3.8 devscripts/make_lazy_extractors.py
-           python3.8 pyinst.py
+           python3.8 -m bundle.pyinstaller

            if ${{ vars.UPDATE_TO_VERIFICATION && 'true' || 'false' }}; then
              arch="${{ (matrix.architecture == 'armv7' && 'armv7l') || matrix.architecture }}"

@@ -224,10 +225,12 @@ jobs:
            fi

      - name: Upload artifacts
-       uses: actions/upload-artifact@v3
+       uses: actions/upload-artifact@v4
        with:
+         name: build-linux_${{ matrix.architecture }}
          path: | # run-on-arch-action designates armv7l as armv7
            repo/dist/yt-dlp_linux_${{ (matrix.architecture == 'armv7' && 'armv7l') || matrix.architecture }}
+         compression-level: 0

  macos:
    needs: process

@@ -240,9 +243,10 @@ jobs:
      - name: Install Requirements
        run: |
          brew install coreutils
-         python3 -m pip install -U --user pip setuptools wheel
+         python3 devscripts/install_deps.py --user -o --include build
+         python3 devscripts/install_deps.py --print --include pyinstaller > requirements.txt
          # We need to ignore wheels otherwise we break universal2 builds
-         python3 -m pip install -U --user --no-binary :all: Pyinstaller -r requirements.txt
+         python3 -m pip install -U --user --no-binary :all: -r requirements.txt

      - name: Prepare
        run: |

@@ -250,9 +254,9 @@ jobs:
          python3 devscripts/make_lazy_extractors.py
      - name: Build
        run: |
-         python3 pyinst.py --target-architecture universal2 --onedir
+         python3 -m bundle.pyinstaller --target-architecture universal2 --onedir
          (cd ./dist/yt-dlp_macos && zip -r ../yt-dlp_macos.zip .)
-         python3 pyinst.py --target-architecture universal2
+         python3 -m bundle.pyinstaller --target-architecture universal2

      - name: Verify --update-to
        if: vars.UPDATE_TO_VERIFICATION

@@ -265,11 +269,13 @@ jobs:
          [[ "$version" != "$downgraded_version" ]]

      - name: Upload artifacts
-       uses: actions/upload-artifact@v3
+       uses: actions/upload-artifact@v4
        with:
+         name: build-${{ github.job }}
          path: |
            dist/yt-dlp_macos
            dist/yt-dlp_macos.zip
+         compression-level: 0

  macos_legacy:
    needs: process

@@ -293,8 +299,8 @@ jobs:
      - name: Install Requirements
        run: |
          brew install coreutils
-         python3 -m pip install -U --user pip setuptools wheel
-         python3 -m pip install -U --user Pyinstaller -r requirements.txt
+         python3 devscripts/install_deps.py --user -o --include build
+         python3 devscripts/install_deps.py --user --include pyinstaller

      - name: Prepare
        run: |

@@ -302,7 +308,7 @@ jobs:
          python3 devscripts/make_lazy_extractors.py
      - name: Build
        run: |
-         python3 pyinst.py
+         python3 -m bundle.pyinstaller
          mv dist/yt-dlp_macos dist/yt-dlp_macos_legacy

      - name: Verify --update-to

@@ -316,10 +322,12 @@ jobs:
          [[ "$version" != "$downgraded_version" ]]

      - name: Upload artifacts
-       uses: actions/upload-artifact@v3
+       uses: actions/upload-artifact@v4
        with:
+         name: build-${{ github.job }}
          path: |
            dist/yt-dlp_macos_legacy
+         compression-level: 0

  windows:
    needs: process

@@ -328,13 +336,14 @@ jobs:

    steps:
      - uses: actions/checkout@v4
-     - uses: actions/setup-python@v4
+     - uses: actions/setup-python@v5
        with: # 3.8 is used for Win7 support
          python-version: "3.8"
      - name: Install Requirements
        run: | # Custom pyinstaller built with https://github.com/yt-dlp/pyinstaller-builds
-         python -m pip install -U pip setuptools wheel py2exe
-         pip install -U "https://yt-dlp.github.io/Pyinstaller-Builds/x86_64/pyinstaller-5.8.0-py3-none-any.whl" -r requirements.txt
+         python devscripts/install_deps.py -o --include build
+         python devscripts/install_deps.py --include py2exe
+         python -m pip install -U "https://yt-dlp.github.io/Pyinstaller-Builds/x86_64/pyinstaller-5.8.0-py3-none-any.whl"

      - name: Prepare
        run: |

@@ -342,10 +351,10 @@ jobs:
          python devscripts/make_lazy_extractors.py
      - name: Build
        run: |
-         python setup.py py2exe
+         python -m bundle.py2exe
          Move-Item ./dist/yt-dlp.exe ./dist/yt-dlp_min.exe
-         python pyinst.py
-         python pyinst.py --onedir
+         python -m bundle.pyinstaller
+         python -m bundle.pyinstaller --onedir
          Compress-Archive -Path ./dist/yt-dlp/* -DestinationPath ./dist/yt-dlp_win.zip

      - name: Verify --update-to

@@ -362,12 +371,14 @@ jobs:
          }

      - name: Upload artifacts
-       uses: actions/upload-artifact@v3
+       uses: actions/upload-artifact@v4
        with:
+         name: build-${{ github.job }}
          path: |
            dist/yt-dlp.exe
            dist/yt-dlp_min.exe
            dist/yt-dlp_win.zip
+         compression-level: 0

  windows32:
    needs: process

@@ -376,14 +387,15 @@ jobs:

    steps:
      - uses: actions/checkout@v4
-     - uses: actions/setup-python@v4
+     - uses: actions/setup-python@v5
        with:
          python-version: "3.8"
          architecture: "x86"
      - name: Install Requirements
        run: |
-         python -m pip install -U pip setuptools wheel
-         pip install -U "https://yt-dlp.github.io/Pyinstaller-Builds/i686/pyinstaller-5.8.0-py3-none-any.whl" -r requirements.txt
+         python devscripts/install_deps.py -o --include build
+         python devscripts/install_deps.py
+         python -m pip install -U "https://yt-dlp.github.io/Pyinstaller-Builds/i686/pyinstaller-5.8.0-py3-none-any.whl"

      - name: Prepare
        run: |

@@ -391,7 +403,7 @@ jobs:
          python devscripts/make_lazy_extractors.py
      - name: Build
        run: |
-         python pyinst.py
+         python -m bundle.pyinstaller

      - name: Verify --update-to
        if: vars.UPDATE_TO_VERIFICATION

@@ -407,10 +419,12 @@ jobs:
          }

      - name: Upload artifacts
-       uses: actions/upload-artifact@v3
+       uses: actions/upload-artifact@v4
        with:
+         name: build-${{ github.job }}
          path: |
            dist/yt-dlp_x86.exe
+         compression-level: 0

  meta_files:
    if: inputs.meta_files && always() && !cancelled()

@@ -424,7 +438,11 @@ jobs:
      - windows32
    runs-on: ubuntu-latest
    steps:
-     - uses: actions/download-artifact@v3
+     - uses: actions/download-artifact@v4
+       with:
+         path: artifact
+         pattern: build-*
+         merge-multiple: true

      - name: Make SHA2-SUMS files
        run: |

@@ -459,8 +477,10 @@ jobs:
          done

      - name: Upload artifacts
-       uses: actions/upload-artifact@v3
+       uses: actions/upload-artifact@v4
        with:
+         name: build-${{ github.job }}
          path: |
-           SHA*SUMS*
            _update_spec
+           SHA*SUMS*
+         compression-level: 0
.github/workflows/core.yml (4 changed lines)

@@ -49,11 +49,11 @@ jobs:
    steps:
      - uses: actions/checkout@v4
      - name: Set up Python ${{ matrix.python-version }}
-       uses: actions/setup-python@v4
+       uses: actions/setup-python@v5
        with:
          python-version: ${{ matrix.python-version }}
      - name: Install test requirements
-       run: pip install pytest -r requirements.txt
+       run: python3 ./devscripts/install_deps.py --include dev
      - name: Run tests
        continue-on-error: False
        run: |
.github/workflows/download.yml (8 changed lines)

@@ -11,11 +11,11 @@ jobs:
    steps:
      - uses: actions/checkout@v4
      - name: Set up Python
-       uses: actions/setup-python@v4
+       uses: actions/setup-python@v5
        with:
          python-version: 3.9
      - name: Install test requirements
-       run: pip install pytest -r requirements.txt
+       run: python3 ./devscripts/install_deps.py --include dev
      - name: Run tests
        continue-on-error: true
        run: python3 ./devscripts/run_tests.py download

@@ -38,11 +38,11 @@ jobs:
    steps:
      - uses: actions/checkout@v4
      - name: Set up Python ${{ matrix.python-version }}
-       uses: actions/setup-python@v4
+       uses: actions/setup-python@v5
        with:
          python-version: ${{ matrix.python-version }}
      - name: Install test requirements
-       run: pip install pytest -r requirements.txt
+       run: python3 ./devscripts/install_deps.py --include dev
      - name: Run tests
        continue-on-error: true
        run: python3 ./devscripts/run_tests.py download
.github/workflows/quick-test.yml (10 changed lines)

@@ -11,11 +11,11 @@ jobs:
    steps:
      - uses: actions/checkout@v4
      - name: Set up Python 3.8
-       uses: actions/setup-python@v4
+       uses: actions/setup-python@v5
        with:
          python-version: '3.8'
      - name: Install test requirements
-       run: pip install pytest -r requirements.txt
+       run: python3 ./devscripts/install_deps.py --include dev
      - name: Run tests
        run: |
          python3 -m yt_dlp -v || true

@@ -26,10 +26,10 @@ jobs:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v4
-     - uses: actions/setup-python@v4
+     - uses: actions/setup-python@v5
      - name: Install flake8
-       run: pip install flake8
+       run: python3 ./devscripts/install_deps.py -o --include dev
      - name: Make lazy extractors
-       run: python devscripts/make_lazy_extractors.py
+       run: python3 ./devscripts/make_lazy_extractors.py
      - name: Run flake8
        run: flake8 .
.github/workflows/release-master.yml (6 changed lines)

@@ -6,8 +6,10 @@ on:
    paths:
      - "yt_dlp/**.py"
      - "!yt_dlp/version.py"
-     - "setup.py"
-     - "pyinst.py"
+     - "bundle/*.py"
+     - "pyproject.toml"
+     - "Makefile"
+     - ".github/workflows/build.yml"
concurrency:
  group: release-master
permissions:
.github/workflows/release-nightly.yml (9 changed lines)

@@ -18,7 +18,14 @@ jobs:
      - name: Check for new commits
        id: check_for_new_commits
        run: |
-         relevant_files=("yt_dlp/*.py" ':!yt_dlp/version.py' "setup.py" "pyinst.py")
+         relevant_files=(
+           "yt_dlp/*.py"
+           ':!yt_dlp/version.py'
+           "bundle/*.py"
+           "pyproject.toml"
+           "Makefile"
+           ".github/workflows/build.yml"
+         )
          echo "commit=$(git log --format=%H -1 --since="24 hours ago" -- "${relevant_files[@]}")" | tee "$GITHUB_OUTPUT"

  release:
.github/workflows/release.yml (26 changed lines)

@@ -71,7 +71,7 @@ jobs:
        with:
          fetch-depth: 0

-     - uses: actions/setup-python@v4
+     - uses: actions/setup-python@v5
        with:
          python-version: "3.10"

@@ -246,15 +246,16 @@ jobs:

    steps:
      - uses: actions/checkout@v4
-     - uses: actions/setup-python@v4
+       with:
+         fetch-depth: 0
+     - uses: actions/setup-python@v5
        with:
          python-version: "3.10"

      - name: Install Requirements
        run: |
          sudo apt -y install pandoc man
-         python -m pip install -U pip setuptools wheel twine
-         python -m pip install -U -r requirements.txt
+         python devscripts/install_deps.py -o --include build

      - name: Prepare
        env:

@@ -266,14 +267,19 @@ jobs:
        run: |
          python devscripts/update-version.py -c "${{ env.channel }}" -r "${{ env.target_repo }}" -s "${{ env.suffix }}" "${{ env.version }}"
          python devscripts/make_lazy_extractors.py
-         sed -i -E "s/(name=')[^']+(', # package name)/\1${{ env.pypi_project }}\2/" setup.py
+         sed -i -E '0,/(name = ")[^"]+(")/s//\1${{ env.pypi_project }}\2/' pyproject.toml

      - name: Build
        run: |
          rm -rf dist/*
          make pypi-files
+         printf '%s\n\n' \
+           'Official repository: <https://github.com/yt-dlp/yt-dlp>' \
+           '**PS**: Some links in this document will not work since this is a copy of the README.md from Github' > ./README.md.new
+         cat ./README.md >> ./README.md.new && mv -f ./README.md.new ./README.md
          python devscripts/set-variant.py pip -M "You installed yt-dlp with pip or using the wheel from PyPi; Use that to update"
-         python setup.py sdist bdist_wheel
+         make clean-cache
+         python -m build --no-isolation .

      - name: Publish to PyPI
        uses: pypa/gh-action-pypi-publish@release/v1

@@ -290,8 +296,12 @@ jobs:
      - uses: actions/checkout@v4
        with:
          fetch-depth: 0
-     - uses: actions/download-artifact@v3
-     - uses: actions/setup-python@v4
+     - uses: actions/download-artifact@v4
+       with:
+         path: artifact
+         pattern: build-*
+         merge-multiple: true
+     - uses: actions/setup-python@v5
        with:
          python-version: "3.10"
MANIFEST.in (deleted, 10 lines); previous content:

include AUTHORS
include Changelog.md
include LICENSE
include README.md
include completions/*/*
include supportedsites.md
include yt-dlp.1
include requirements.txt
recursive-include devscripts *
recursive-include test *
Makefile (38 changed lines)

@@ -6,11 +6,11 @@ doc: README.md CONTRIBUTING.md issuetemplates supportedsites
 ot: offlinetest
 tar: yt-dlp.tar.gz

-# Keep this list in sync with MANIFEST.in
+# Keep this list in sync with pyproject.toml includes/artifacts
 # intended use: when building a source distribution,
-# make pypi-files && python setup.py sdist
+# make pypi-files && python3 -m build -sn .
 pypi-files: AUTHORS Changelog.md LICENSE README.md README.txt supportedsites \
-        completions yt-dlp.1 requirements.txt setup.cfg devscripts/* test/*
+        completions yt-dlp.1 pyproject.toml setup.cfg devscripts/* test/*

 .PHONY: all clean install test tar pypi-files completions ot offlinetest codetest supportedsites

@@ -21,7 +21,7 @@ clean-test:
         *.mp4 *.mpga *.oga *.ogg *.opus *.png *.sbv *.srt *.swf *.swp *.tt *.ttml *.url *.vtt *.wav *.webloc *.webm *.webp
 clean-dist:
         rm -rf yt-dlp.1.temp.md yt-dlp.1 README.txt MANIFEST build/ dist/ .coverage cover/ yt-dlp.tar.gz completions/ \
-        yt_dlp/extractor/lazy_extractors.py *.spec CONTRIBUTING.md.tmp yt-dlp yt-dlp.exe yt_dlp.egg-info/ AUTHORS .mailmap
+        yt_dlp/extractor/lazy_extractors.py *.spec CONTRIBUTING.md.tmp yt-dlp yt-dlp.exe yt_dlp.egg-info/ AUTHORS
 clean-cache:
         find . \( \
         -type d -name .pytest_cache -o -type d -name __pycache__ -o -name "*.pyc" -o -name "*.class" \

@@ -73,24 +73,24 @@ test:
 offlinetest: codetest
         $(PYTHON) -m pytest -k "not download"

-# XXX: This is hard to maintain
-CODE_FOLDERS = yt_dlp yt_dlp/downloader yt_dlp/extractor yt_dlp/postprocessor yt_dlp/compat yt_dlp/compat/urllib yt_dlp/utils yt_dlp/dependencies yt_dlp/networking
-yt-dlp: yt_dlp/*.py yt_dlp/*/*.py
+CODE_FOLDERS := $(shell find yt_dlp -type d -not -name '__*' -exec sh -c 'test -e "$$1"/__init__.py' sh {} \; -print)
+CODE_FILES := $(shell for f in $(CODE_FOLDERS); do echo "$$f" | awk '{gsub(/\/[^\/]+/,"/*"); print $$1"/*.py"}'; done | sort -u)
+yt-dlp: $(CODE_FILES)
         mkdir -p zip
         for d in $(CODE_FOLDERS) ; do \
           mkdir -p zip/$$d ;\
           cp -pPR $$d/*.py zip/$$d/ ;\
         done
-        touch -t 200001010101 zip/yt_dlp/*.py zip/yt_dlp/*/*.py
+        cd zip ; touch -t 200001010101 $(CODE_FILES)
         mv zip/yt_dlp/__main__.py zip/
-        cd zip ; zip -q ../yt-dlp yt_dlp/*.py yt_dlp/*/*.py __main__.py
+        cd zip ; zip -q ../yt-dlp $(CODE_FILES) __main__.py
         rm -rf zip
         echo '#!$(PYTHON)' > yt-dlp
         cat yt-dlp.zip >> yt-dlp
         rm yt-dlp.zip
         chmod a+x yt-dlp

-README.md: yt_dlp/*.py yt_dlp/*/*.py devscripts/make_readme.py
+README.md: $(CODE_FILES) devscripts/make_readme.py
         COLUMNS=80 $(PYTHON) yt_dlp/__main__.py --ignore-config --help | $(PYTHON) devscripts/make_readme.py

 CONTRIBUTING.md: README.md devscripts/make_contributing.py

@@ -115,15 +115,15 @@ yt-dlp.1: README.md devscripts/prepare_manpage.py
         pandoc -s -f $(MARKDOWN) -t man yt-dlp.1.temp.md -o yt-dlp.1
         rm -f yt-dlp.1.temp.md

-completions/bash/yt-dlp: yt_dlp/*.py yt_dlp/*/*.py devscripts/bash-completion.in
+completions/bash/yt-dlp: $(CODE_FILES) devscripts/bash-completion.in
         mkdir -p completions/bash
         $(PYTHON) devscripts/bash-completion.py

-completions/zsh/_yt-dlp: yt_dlp/*.py yt_dlp/*/*.py devscripts/zsh-completion.in
+completions/zsh/_yt-dlp: $(CODE_FILES) devscripts/zsh-completion.in
         mkdir -p completions/zsh
         $(PYTHON) devscripts/zsh-completion.py

-completions/fish/yt-dlp.fish: yt_dlp/*.py yt_dlp/*/*.py devscripts/fish-completion.in
+completions/fish/yt-dlp.fish: $(CODE_FILES) devscripts/fish-completion.in
         mkdir -p completions/fish
         $(PYTHON) devscripts/fish-completion.py

@@ -144,12 +144,8 @@ yt-dlp.tar.gz: all
         -- \
         README.md supportedsites.md Changelog.md LICENSE \
         CONTRIBUTING.md Collaborators.md CONTRIBUTORS AUTHORS \
-        Makefile MANIFEST.in yt-dlp.1 README.txt completions \
-        setup.py setup.cfg yt-dlp yt_dlp requirements.txt \
-        devscripts test
+        Makefile yt-dlp.1 README.txt completions .gitignore \
+        setup.cfg yt-dlp yt_dlp pyproject.toml devscripts test

-AUTHORS: .mailmap
-        git shortlog -s -n | cut -f2 | sort > AUTHORS
-
-.mailmap:
-        git shortlog -s -e -n | awk '!(out[$$NF]++) { $$1="";sub(/^[ \t]+/,""); print}' > .mailmap
+AUTHORS:
+        git shortlog -s -n HEAD | cut -f2 | sort > AUTHORS
README.md (25 changed lines)

@@ -321,19 +321,21 @@ ### Deprecated
 ## COMPILE

 ### Standalone PyInstaller Builds
-To build the standalone executable, you must have Python and `pyinstaller` (plus any of yt-dlp's [optional dependencies](#dependencies) if needed). Once you have all the necessary dependencies installed, simply run `pyinst.py`. The executable will be built for the same architecture (x86/ARM, 32/64 bit) as the Python used.
+To build the standalone executable, you must have Python and `pyinstaller` (plus any of yt-dlp's [optional dependencies](#dependencies) if needed). The executable will be built for the same architecture (x86/ARM, 32/64 bit) as the Python used. You can run the following commands:

-    python3 -m pip install -U pyinstaller -r requirements.txt
-    python3 devscripts/make_lazy_extractors.py
-    python3 pyinst.py
+```
+python3 devscripts/install_deps.py --include pyinstaller
+python3 devscripts/make_lazy_extractors.py
+python3 -m bundle.pyinstaller
+```

 On some systems, you may need to use `py` or `python` instead of `python3`.

-`pyinst.py` accepts any arguments that can be passed to `pyinstaller`, such as `--onefile/-F` or `--onedir/-D`, which is further [documented here](https://pyinstaller.org/en/stable/usage.html#what-to-generate).
+`bundle/pyinstaller.py` accepts any arguments that can be passed to `pyinstaller`, such as `--onefile/-F` or `--onedir/-D`, which is further [documented here](https://pyinstaller.org/en/stable/usage.html#what-to-generate).

 **Note**: Pyinstaller versions below 4.4 [do not support](https://github.com/pyinstaller/pyinstaller#requirements-and-tested-platforms) Python installed from the Windows store without using a virtual environment.

-**Important**: Running `pyinstaller` directly **without** using `pyinst.py` is **not** officially supported. This may or may not work correctly.
+**Important**: Running `pyinstaller` directly **without** using `bundle/pyinstaller.py` is **not** officially supported. This may or may not work correctly.

 ### Platform-independent Binary (UNIX)
 You will need the build tools `python` (3.8+), `zip`, `make` (GNU), `pandoc`\* and `pytest`\*.

@@ -346,14 +348,17 @@ ### Standalone Py2Exe Builds (Windows)

 While we provide the option to build with [py2exe](https://www.py2exe.org), it is recommended to build [using PyInstaller](#standalone-pyinstaller-builds) instead since the py2exe builds **cannot contain `pycryptodomex`/`certifi` and needs VC++14** on the target computer to run.

-If you wish to build it anyway, install Python and py2exe, and then simply run `setup.py py2exe`
+If you wish to build it anyway, install Python (if it is not already installed) and you can run the following commands:

-    py -m pip install -U py2exe -r requirements.txt
-    py devscripts/make_lazy_extractors.py
-    py setup.py py2exe
+```
+py devscripts/install_deps.py --include py2exe
+py devscripts/make_lazy_extractors.py
+py -m bundle.py2exe
+```

 ### Related scripts

+* **`devscripts/install_deps.py`** - Install dependencies for yt-dlp.
 * **`devscripts/update-version.py`** - Update the version number based on current date.
 * **`devscripts/set-variant.py`** - Set the build variant of the executable.
 * **`devscripts/make_changelog.py`** - Create a markdown changelog using short commit messages and update `CONTRIBUTORS` file.
bundle/__init__.py (new file, 1 line):

# Empty file
bundle/py2exe.py (new executable file, 59 lines):

#!/usr/bin/env python3

# Allow execution from anywhere
import os
import sys

sys.path.insert(0, os.path.dirname(os.path.dirname(os.path.abspath(__file__))))

import warnings

from py2exe import freeze

from devscripts.utils import read_version

VERSION = read_version()


def main():
    warnings.warn(
        'py2exe builds do not support pycryptodomex and needs VC++14 to run. '
        'It is recommended to run "pyinst.py" to build using pyinstaller instead')

    return freeze(
        console=[{
            'script': './yt_dlp/__main__.py',
            'dest_base': 'yt-dlp',
            'icon_resources': [(1, 'devscripts/logo.ico')],
        }],
        version_info={
            'version': VERSION,
            'description': 'A youtube-dl fork with additional features and patches',
            'comments': 'Official repository: <https://github.com/yt-dlp/yt-dlp>',
            'product_name': 'yt-dlp',
            'product_version': VERSION,
        },
        options={
            'bundle_files': 0,
            'compressed': 1,
            'optimize': 2,
            'dist_dir': './dist',
            'excludes': [
                # py2exe cannot import Crypto
                'Crypto',
                'Cryptodome',
                # py2exe appears to confuse this with our socks library.
                # We don't use pysocks and urllib3.contrib.socks would fail to import if tried.
                'urllib3.contrib.socks'
            ],
            'dll_excludes': ['w9xpopen.exe', 'crypt32.dll'],
            # Modules that are only imported dynamically must be added here
            'includes': ['yt_dlp.compat._legacy', 'yt_dlp.compat._deprecated',
                         'yt_dlp.utils._legacy', 'yt_dlp.utils._deprecated'],
        },
        zipfile=None,
    )


if __name__ == '__main__':
    main()
pyinst.py → bundle/pyinstaller.py (renamed, mode normal → executable; 2 changed lines)

@@ -4,7 +4,7 @@
 import os
 import sys

-sys.path.insert(0, os.path.dirname(os.path.abspath(__file__)))
+sys.path.insert(0, os.path.dirname(os.path.dirname(os.path.abspath(__file__))))

 import platform
devscripts/install_deps.py (new executable file, 66 lines):

#!/usr/bin/env python3

# Allow execution from anywhere
import os
import sys

sys.path.insert(0, os.path.dirname(os.path.dirname(os.path.abspath(__file__))))

import argparse
import re
import subprocess

from devscripts.tomlparse import parse_toml
from devscripts.utils import read_file


def parse_args():
    parser = argparse.ArgumentParser(description='Install dependencies for yt-dlp')
    parser.add_argument(
        'input', nargs='?', metavar='TOMLFILE', default='pyproject.toml', help='Input file (default: %(default)s)')
    parser.add_argument(
        '-e', '--exclude', metavar='REQUIREMENT', action='append', help='Exclude a required dependency')
    parser.add_argument(
        '-i', '--include', metavar='GROUP', action='append', help='Include an optional dependency group')
    parser.add_argument(
        '-o', '--only-optional', action='store_true', help='Only install optional dependencies')
    parser.add_argument(
        '-p', '--print', action='store_true', help='Only print a requirements.txt to stdout')
    parser.add_argument(
        '-u', '--user', action='store_true', help='Install with pip as --user')
    return parser.parse_args()


def main():
    args = parse_args()
    toml_data = parse_toml(read_file(args.input))
    deps = toml_data['project']['dependencies']
    targets = deps.copy() if not args.only_optional else []

    for exclude in args.exclude or []:
        for dep in deps:
            simplified_dep = re.match(r'[\w-]+', dep)[0]
            if dep in targets and (exclude.lower() == simplified_dep.lower() or exclude == dep):
                targets.remove(dep)

    optional_deps = toml_data['project']['optional-dependencies']
    for include in args.include or []:
        group = optional_deps.get(include)
        if group:
            targets.extend(group)

    if args.print:
        for target in targets:
            print(target)
        return

    pip_args = [sys.executable, '-m', 'pip', 'install', '-U']
    if args.user:
        pip_args.append('--user')
    pip_args.extend(targets)

    return subprocess.call(pip_args)


if __name__ == '__main__':
    sys.exit(main())
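For illustration only (not part of the commit): the sketch below drives the helper the same way the updated build.yml does, using its --print/--exclude/--include flags, and assumes it is run from the repository root where pyproject.toml lives.

# Illustrative sketch, not from the commit: invoke install_deps.py with the
# flags used in the updated build.yml and capture the generated requirements.
import subprocess
import sys

requirements = subprocess.run(
    [sys.executable, 'devscripts/install_deps.py', '--print',
     '--exclude', 'brotli', '--exclude', 'brotlicffi',
     '--include', 'secretstorage', '--include', 'pyinstaller'],
    check=True, capture_output=True, text=True).stdout
print(requirements)  # requirements.txt-style listing derived from pyproject.toml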
devscripts/tomlparse.py (new executable file, 189 lines):

#!/usr/bin/env python3

"""
Simple parser for spec compliant toml files

A simple toml parser for files that comply with the spec.
Should only be used to parse `pyproject.toml` for `install_deps.py`.

IMPORTANT: INVALID FILES OR MULTILINE STRINGS ARE NOT SUPPORTED!
"""

from __future__ import annotations

import datetime
import json
import re

WS = r'(?:[\ \t]*)'
STRING_RE = re.compile(r'"(?:\\.|[^\\"\n])*"|\'[^\'\n]*\'')
SINGLE_KEY_RE = re.compile(rf'{STRING_RE.pattern}|[A-Za-z0-9_-]+')
KEY_RE = re.compile(rf'{WS}(?:{SINGLE_KEY_RE.pattern}){WS}(?:\.{WS}(?:{SINGLE_KEY_RE.pattern}){WS})*')
EQUALS_RE = re.compile(rf'={WS}')
WS_RE = re.compile(WS)

_SUBTABLE = rf'(?P<subtable>^\[(?P<is_list>\[)?(?P<path>{KEY_RE.pattern})\]\]?)'
EXPRESSION_RE = re.compile(rf'^(?:{_SUBTABLE}|{KEY_RE.pattern}=)', re.MULTILINE)

LIST_WS_RE = re.compile(rf'{WS}((#[^\n]*)?\n{WS})*')
LEFTOVER_VALUE_RE = re.compile(r'[^,}\]\t\n#]+')


def parse_key(value: str):
    for match in SINGLE_KEY_RE.finditer(value):
        if match[0][0] == '"':
            yield json.loads(match[0])
        elif match[0][0] == '\'':
            yield match[0][1:-1]
        else:
            yield match[0]


def get_target(root: dict, paths: list[str], is_list=False):
    target = root

    for index, key in enumerate(paths, 1):
        use_list = is_list and index == len(paths)
        result = target.get(key)
        if result is None:
            result = [] if use_list else {}
            target[key] = result

        if isinstance(result, dict):
            target = result
        elif use_list:
            target = {}
            result.append(target)
        else:
            target = result[-1]

    assert isinstance(target, dict)
    return target


def parse_enclosed(data: str, index: int, end: str, ws_re: re.Pattern):
    index += 1

    if match := ws_re.match(data, index):
        index = match.end()

    while data[index] != end:
        index = yield True, index

        if match := ws_re.match(data, index):
            index = match.end()

        if data[index] == ',':
            index += 1

        if match := ws_re.match(data, index):
            index = match.end()

    assert data[index] == end
    yield False, index + 1


def parse_value(data: str, index: int):
    if data[index] == '[':
        result = []

        indices = parse_enclosed(data, index, ']', LIST_WS_RE)
        valid, index = next(indices)
        while valid:
            index, value = parse_value(data, index)
            result.append(value)
            valid, index = indices.send(index)

        return index, result

    if data[index] == '{':
        result = {}

        indices = parse_enclosed(data, index, '}', WS_RE)
        valid, index = next(indices)
        while valid:
            valid, index = indices.send(parse_kv_pair(data, index, result))

        return index, result

    if match := STRING_RE.match(data, index):
        return match.end(), json.loads(match[0]) if match[0][0] == '"' else match[0][1:-1]

    match = LEFTOVER_VALUE_RE.match(data, index)
    assert match
    value = match[0].strip()
    for func in [
        int,
        float,
        datetime.time.fromisoformat,
        datetime.date.fromisoformat,
        datetime.datetime.fromisoformat,
        {'true': True, 'false': False}.get,
    ]:
        try:
            value = func(value)
            break
        except Exception:
            pass

    return match.end(), value


def parse_kv_pair(data: str, index: int, target: dict):
    match = KEY_RE.match(data, index)
    if not match:
        return None

    *keys, key = parse_key(match[0])

    match = EQUALS_RE.match(data, match.end())
    assert match
    index = match.end()

    index, value = parse_value(data, index)
    get_target(target, keys)[key] = value
    return index


def parse_toml(data: str):
    root = {}
    target = root

    index = 0
    while True:
        match = EXPRESSION_RE.search(data, index)
        if not match:
            break

        if match.group('subtable'):
            index = match.end()
            path, is_list = match.group('path', 'is_list')
            target = get_target(root, list(parse_key(path)), bool(is_list))
            continue

        index = parse_kv_pair(data, match.start(), target)
        assert index is not None

    return root


def main():
    import argparse
    from pathlib import Path

    parser = argparse.ArgumentParser()
    parser.add_argument('infile', type=Path, help='The TOML file to read as input')
    args = parser.parse_args()

    with args.infile.open('r', encoding='utf-8') as file:
        data = file.read()

    def default(obj):
        if isinstance(obj, (datetime.date, datetime.time, datetime.datetime)):
            return obj.isoformat()

    print(json.dumps(parse_toml(data), default=default))


if __name__ == '__main__':
    main()
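As a quick illustration (not part of the commit), parse_toml can be exercised directly; the fragment below is a made-up TOML snippet shaped like the new pyproject.toml, and the lookups mirror what install_deps.py reads. It assumes the snippet is run from the repository root so that devscripts is importable.

# Illustrative sketch, not from the commit: parse a small TOML fragment and
# read back the tables that install_deps.py consumes.
from devscripts.tomlparse import parse_toml

data = parse_toml('''
[project]
name = "yt-dlp"
dependencies = ["certifi", "requests>=2.31.0,<3"]

[project.optional-dependencies]
pyinstaller = ["pyinstaller>=6.3"]
''')

print(data['project']['dependencies'])
print(data['project']['optional-dependencies']['pyinstaller'])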
pyproject.toml (123 changed lines)

@@ -1,5 +1,120 @@
 [build-system]
-build-backend = 'setuptools.build_meta'
-# https://github.com/yt-dlp/yt-dlp/issues/5941
-# https://github.com/pypa/distutils/issues/17
-requires = ['setuptools > 50']
+requires = ["hatchling"]
+build-backend = "hatchling.build"
+
+[project]
+name = "yt-dlp"
+maintainers = [
+    {name = "pukkandan", email = "pukkandan.ytdlp@gmail.com"},
+    {name = "Grub4K", email = "contact@grub4k.xyz"},
+    {name = "bashonly", email = "bashonly@protonmail.com"},
+]
+description = "A youtube-dl fork with additional features and patches"
+readme = "README.md"
+requires-python = ">=3.8"
+keywords = [
+    "youtube-dl",
+    "video-downloader",
+    "youtube-downloader",
+    "sponsorblock",
+    "youtube-dlc",
+    "yt-dlp",
+]
+license = {file = "LICENSE"}
+classifiers = [
+    "Topic :: Multimedia :: Video",
+    "Development Status :: 5 - Production/Stable",
+    "Environment :: Console",
+    "Programming Language :: Python",
+    "Programming Language :: Python :: 3 :: Only",
+    "Programming Language :: Python :: 3.8",
+    "Programming Language :: Python :: 3.9",
+    "Programming Language :: Python :: 3.10",
+    "Programming Language :: Python :: 3.11",
+    "Programming Language :: Python :: 3.12",
+    "Programming Language :: Python :: Implementation",
+    "Programming Language :: Python :: Implementation :: CPython",
+    "Programming Language :: Python :: Implementation :: PyPy",
+    "License :: OSI Approved :: The Unlicense (Unlicense)",
+    "Operating System :: OS Independent",
+]
+dynamic = ["version"]
+dependencies = [
+    "brotli; implementation_name=='cpython'",
+    "brotlicffi; implementation_name!='cpython'",
+    "certifi",
+    "mutagen",
+    "pycryptodomex",
+    "requests>=2.31.0,<3",
+    "urllib3>=1.26.17,<3",
+    "websockets>=12.0",
+]
+
+[project.optional-dependencies]
+secretstorage = [
+    "cffi",
+    "secretstorage",
+]
+build = [
+    "build",
+    "hatchling",
+    "pip",
+    "wheel",
+]
+dev = [
+    "flake8",
+    "isort",
+    "pytest",
+]
+pyinstaller = ["pyinstaller>=6.3"]
+py2exe = ["py2exe>=0.12"]
+
+[project.urls]
+Documentation = "https://github.com/yt-dlp/yt-dlp#readme"
+Repository = "https://github.com/yt-dlp/yt-dlp"
+Tracker = "https://github.com/yt-dlp/yt-dlp/issues"
+Funding = "https://github.com/yt-dlp/yt-dlp/blob/master/Collaborators.md#collaborators"
+
+[project.scripts]
+yt-dlp = "yt_dlp:main"
+
+[project.entry-points.pyinstaller40]
+hook-dirs = "yt_dlp.__pyinstaller:get_hook_dirs"
+
+[tool.hatch.build.targets.sdist]
+include = [
+    "/yt_dlp",
+    "/devscripts",
+    "/test",
+    "/.gitignore", # included by default, needed for auto-excludes
+    "/Changelog.md",
+    "/LICENSE", # included as license
+    "/pyproject.toml", # included by default
+    "/README.md", # included as readme
+    "/setup.cfg",
+    "/supportedsites.md",
+]
+exclude = ["/yt_dlp/__pyinstaller"]
+artifacts = [
+    "/yt_dlp/extractor/lazy_extractors.py",
+    "/completions",
+    "/AUTHORS", # included by default
+    "/README.txt",
+    "/yt-dlp.1",
+]
+
+[tool.hatch.build.targets.wheel]
+packages = ["yt_dlp"]
+exclude = ["/yt_dlp/__pyinstaller"]
+artifacts = ["/yt_dlp/extractor/lazy_extractors.py"]
+
+[tool.hatch.build.targets.wheel.shared-data]
+"completions/bash/yt-dlp" = "share/bash-completion/completions/yt-dlp"
+"completions/zsh/_yt-dlp" = "share/zsh/site-functions/_yt-dlp"
+"completions/fish/yt-dlp.fish" = "share/fish/vendor_completions.d/yt-dlp.fish"
+"README.txt" = "share/doc/yt_dlp/README.txt"
+"yt-dlp.1" = "share/man/man1/yt-dlp.1"
+
+[tool.hatch.version]
+path = "yt_dlp/version.py"
+pattern = "_pkg_version = '(?P<version>[^']+)'"
requirements.txt (deleted, 8 lines); previous content:

mutagen
pycryptodomex
brotli; implementation_name=='cpython'
brotlicffi; implementation_name!='cpython'
certifi
requests>=2.31.0,<3
urllib3>=1.26.17,<3
websockets>=12.0
setup.cfg

@@ -1,7 +1,3 @@
-[wheel]
-universal = true
-
-
 [flake8]
 exclude = build,venv,.tox,.git,.pytest_cache
 ignore = E402,E501,E731,E741,W503
setup.py (deleted, 183 lines); previous content:

#!/usr/bin/env python3

# Allow execution from anywhere
import os
import sys

sys.path.insert(0, os.path.dirname(os.path.abspath(__file__)))

import subprocess
import warnings

try:
    from setuptools import Command, find_packages, setup
    setuptools_available = True
except ImportError:
    from distutils.core import Command, setup
    setuptools_available = False

from devscripts.utils import read_file, read_version

VERSION = read_version(varname='_pkg_version')

DESCRIPTION = 'A youtube-dl fork with additional features and patches'

LONG_DESCRIPTION = '\n\n'.join((
    'Official repository: <https://github.com/yt-dlp/yt-dlp>',
    '**PS**: Some links in this document will not work since this is a copy of the README.md from Github',
    read_file('README.md')))

REQUIREMENTS = read_file('requirements.txt').splitlines()


def packages():
    if setuptools_available:
        return find_packages(exclude=('youtube_dl', 'youtube_dlc', 'test', 'ytdlp_plugins', 'devscripts'))

    return [
        'yt_dlp', 'yt_dlp.extractor', 'yt_dlp.downloader', 'yt_dlp.postprocessor', 'yt_dlp.compat',
    ]


def py2exe_params():
    warnings.warn(
        'py2exe builds do not support pycryptodomex and needs VC++14 to run. '
        'It is recommended to run "pyinst.py" to build using pyinstaller instead')

    return {
        'console': [{
            'script': './yt_dlp/__main__.py',
            'dest_base': 'yt-dlp',
            'icon_resources': [(1, 'devscripts/logo.ico')],
        }],
        'version_info': {
            'version': VERSION,
            'description': DESCRIPTION,
            'comments': LONG_DESCRIPTION.split('\n')[0],
            'product_name': 'yt-dlp',
            'product_version': VERSION,
        },
        'options': {
            'bundle_files': 0,
            'compressed': 1,
            'optimize': 2,
            'dist_dir': './dist',
            'excludes': [
                # py2exe cannot import Crypto
                'Crypto',
                'Cryptodome',
                # py2exe appears to confuse this with our socks library.
                # We don't use pysocks and urllib3.contrib.socks would fail to import if tried.
                'urllib3.contrib.socks'
            ],
            'dll_excludes': ['w9xpopen.exe', 'crypt32.dll'],
            # Modules that are only imported dynamically must be added here
            'includes': ['yt_dlp.compat._legacy', 'yt_dlp.compat._deprecated',
                         'yt_dlp.utils._legacy', 'yt_dlp.utils._deprecated'],
        },
        'zipfile': None,
    }


def build_params():
    files_spec = [
        ('share/bash-completion/completions', ['completions/bash/yt-dlp']),
        ('share/zsh/site-functions', ['completions/zsh/_yt-dlp']),
        ('share/fish/vendor_completions.d', ['completions/fish/yt-dlp.fish']),
        ('share/doc/yt_dlp', ['README.txt']),
        ('share/man/man1', ['yt-dlp.1'])
    ]
    data_files = []
    for dirname, files in files_spec:
        resfiles = []
        for fn in files:
            if not os.path.exists(fn):
                warnings.warn(f'Skipping file {fn} since it is not present. Try running " make pypi-files " first')
            else:
                resfiles.append(fn)
        data_files.append((dirname, resfiles))

    params = {'data_files': data_files}

    if setuptools_available:
        params['entry_points'] = {
            'console_scripts': ['yt-dlp = yt_dlp:main'],
            'pyinstaller40': ['hook-dirs = yt_dlp.__pyinstaller:get_hook_dirs'],
        }
    else:
        params['scripts'] = ['yt-dlp']
    return params


class build_lazy_extractors(Command):
    description = 'Build the extractor lazy loading module'
    user_options = []

    def initialize_options(self):
        pass

    def finalize_options(self):
        pass

    def run(self):
        if self.dry_run:
            print('Skipping build of lazy extractors in dry run mode')
            return
        subprocess.run([sys.executable, 'devscripts/make_lazy_extractors.py'])


def main():
    if sys.argv[1:2] == ['py2exe']:
        params = py2exe_params()
        try:
            from py2exe import freeze
        except ImportError:
            import py2exe  # noqa: F401
            warnings.warn('You are using an outdated version of py2exe. Support for this version will be removed in the future')
            params['console'][0].update(params.pop('version_info'))
            params['options'] = {'py2exe': params.pop('options')}
        else:
            return freeze(**params)
    else:
        params = build_params()

    setup(
        name='yt-dlp', # package name (do not change/remove comment)
        version=VERSION,
        maintainer='pukkandan',
        maintainer_email='pukkandan.ytdlp@gmail.com',
        description=DESCRIPTION,
        long_description=LONG_DESCRIPTION,
        long_description_content_type='text/markdown',
        url='https://github.com/yt-dlp/yt-dlp',
        packages=packages(),
        install_requires=REQUIREMENTS,
        python_requires='>=3.8',
        project_urls={
            'Documentation': 'https://github.com/yt-dlp/yt-dlp#readme',
            'Source': 'https://github.com/yt-dlp/yt-dlp',
            'Tracker': 'https://github.com/yt-dlp/yt-dlp/issues',
            'Funding': 'https://github.com/yt-dlp/yt-dlp/blob/master/Collaborators.md#collaborators',
        },
        classifiers=[
            'Topic :: Multimedia :: Video',
            'Development Status :: 5 - Production/Stable',
            'Environment :: Console',
            'Programming Language :: Python',
            'Programming Language :: Python :: 3.8',
            'Programming Language :: Python :: 3.9',
            'Programming Language :: Python :: 3.10',
            'Programming Language :: Python :: 3.11',
            'Programming Language :: Python :: 3.12',
            'Programming Language :: Python :: Implementation',
            'Programming Language :: Python :: Implementation :: CPython',
            'Programming Language :: Python :: Implementation :: PyPy',
            'License :: Public Domain',
            'Operating System :: OS Independent',
        ],
        cmdclass={'build_lazy_extractors': build_lazy_extractors},
        **params
    )


main()
yt_dlp/extractor/_extractors.py

@@ -257,6 +257,7 @@
 from .bloomberg import BloombergIE
 from .bokecc import BokeCCIE
 from .bongacams import BongaCamsIE
+from .boosty import BoostyIE
 from .bostonglobe import BostonGlobeIE
 from .box import BoxIE
 from .boxcast import BoxCastVideoIE
yt_dlp/extractor/antenna.py

@@ -78,14 +78,14 @@ class Ant1NewsGrArticleIE(AntennaBaseIE):
     _TESTS = [{
         'url': 'https://www.ant1news.gr/afieromata/article/549468/o-tzeims-mpont-sta-meteora-oi-apeiles-kai-o-xesikomos-ton-kalogeron',
-        'md5': '294f18331bb516539d72d85a82887dcc',
+        'md5': '57eb8d12181f0fa2b14b0b138e1de9b6',
         'info_dict': {
             'id': '_xvg/m_cmbatw=',
             'ext': 'mp4',
             'title': 'md5:a93e8ecf2e4073bfdffcb38f59945411',
-            'timestamp': 1603092840,
-            'upload_date': '20201019',
-            'thumbnail': 'https://ant1media.azureedge.net/imgHandler/640/756206d2-d640-40e2-b201-3555abdfc0db.jpg',
+            'timestamp': 1666166520,
+            'upload_date': '20221019',
+            'thumbnail': 'https://ant1media.azureedge.net/imgHandler/1920/756206d2-d640-40e2-b201-3555abdfc0db.jpg',
         },
     }, {
         'url': 'https://ant1news.gr/Society/article/620286/symmoria-anilikon-dikigoros-thymaton-ithelan-na-toys-apoteleiosoyn',

@@ -117,7 +117,7 @@ class Ant1NewsGrEmbedIE(AntennaBaseIE):
     _BASE_PLAYER_URL_RE = r'(?:https?:)?//(?:[a-zA-Z0-9\-]+\.)?(?:antenna|ant1news)\.gr/templates/pages/player'
     _VALID_URL = rf'{_BASE_PLAYER_URL_RE}\?([^#]+&)?cid=(?P<id>[^#&]+)'
     _EMBED_REGEX = [rf'<iframe[^>]+?src=(?P<_q1>["\'])(?P<url>{_BASE_PLAYER_URL_RE}\?(?:(?!(?P=_q1)).)+)(?P=_q1)']
-    _API_PATH = '/news/templates/data/jsonPlayer'
+    _API_PATH = '/templates/data/jsonPlayer'

     _TESTS = [{
         'url': 'https://www.antenna.gr/templates/pages/player?cid=3f_li_c_az_jw_y_u=&w=670&h=377',
yt_dlp/extractor/boosty.py (new file, 209 lines):

from .common import InfoExtractor
from .youtube import YoutubeIE
from ..utils import (
    ExtractorError,
    int_or_none,
    qualities,
    str_or_none,
    url_or_none,
)
from ..utils.traversal import traverse_obj


class BoostyIE(InfoExtractor):
    _VALID_URL = r'https?://(?:www\.)?boosty\.to/(?P<user>[^/#?]+)/posts/(?P<post_id>[^/#?]+)'
    _TESTS = [{
        # single ok_video
        'url': 'https://boosty.to/kuplinov/posts/e55d050c-e3bb-4873-a7db-ac7a49b40c38',
        'info_dict': {
            'id': 'd7473824-352e-48e2-ae53-d4aa39459968',
            'title': 'phasma_3',
            'channel': 'Kuplinov',
            'channel_id': '7958701',
            'timestamp': 1655031975,
            'upload_date': '20220612',
            'release_timestamp': 1655049000,
            'release_date': '20220612',
            'modified_timestamp': 1668680993,
            'modified_date': '20221117',
            'tags': ['куплинов', 'phasmophobia'],
            'like_count': int,
            'ext': 'mp4',
            'duration': 105,
            'view_count': int,
            'thumbnail': r're:^https://i\.mycdn\.me/videoPreview\?',
        },
    }, {
        # multiple ok_video
        'url': 'https://boosty.to/maddyson/posts/0c652798-3b35-471f-8b48-a76a0b28736f',
        'info_dict': {
            'id': '0c652798-3b35-471f-8b48-a76a0b28736f',
            'title': 'то что не пропустил юта6',
            'channel': 'Илья Давыдов',
            'channel_id': '6808257',
            'timestamp': 1694017040,
            'upload_date': '20230906',
            'release_timestamp': 1694017040,
            'release_date': '20230906',
            'modified_timestamp': 1694071178,
            'modified_date': '20230907',
            'like_count': int,
        },
        'playlist_count': 3,
        'playlist': [{
            'info_dict': {
                'id': 'cc325a9f-a563-41c6-bf47-516c1b506c9a',
                'title': 'то что не пропустил юта6',
                'channel': 'Илья Давыдов',
                'channel_id': '6808257',
                'timestamp': 1694017040,
                'upload_date': '20230906',
                'release_timestamp': 1694017040,
                'release_date': '20230906',
                'modified_timestamp': 1694071178,
                'modified_date': '20230907',
                'like_count': int,
                'ext': 'mp4',
                'duration': 31204,
                'view_count': int,
                'thumbnail': r're:^https://i\.mycdn\.me/videoPreview\?',
            },
        }, {
            'info_dict': {
                'id': 'd07b0a72-9493-4512-b54e-55ce468fd4b7',
                'title': 'то что не пропустил юта6',
                'channel': 'Илья Давыдов',
                'channel_id': '6808257',
                'timestamp': 1694017040,
                'upload_date': '20230906',
                'release_timestamp': 1694017040,
                'release_date': '20230906',
                'modified_timestamp': 1694071178,
                'modified_date': '20230907',
                'like_count': int,
                'ext': 'mp4',
                'duration': 25704,
                'view_count': int,
                'thumbnail': r're:^https://i\.mycdn\.me/videoPreview\?',
            },
        }, {
            'info_dict': {
                'id': '4a3bba32-78c8-422a-9432-2791aff60b42',
                'title': 'то что не пропустил юта6',
                'channel': 'Илья Давыдов',
                'channel_id': '6808257',
                'timestamp': 1694017040,
                'upload_date': '20230906',
                'release_timestamp': 1694017040,
                'release_date': '20230906',
                'modified_timestamp': 1694071178,
                'modified_date': '20230907',
                'like_count': int,
                'ext': 'mp4',
                'duration': 31867,
                'view_count': int,
                'thumbnail': r're:^https://i\.mycdn\.me/videoPreview\?',
            },
        }],
    }, {
        # single external video (youtube)
        'url': 'https://boosty.to/denischuzhoy/posts/6094a487-bcec-4cf8-a453-43313b463c38',
        'info_dict': {
            'id': 'EXelTnve5lY',
            'title': 'Послание Президента Федеральному Собранию | Класс народа',
            'upload_date': '20210425',
            'channel': 'Денис Чужой',
            'tags': 'count:10',
            'like_count': int,
            'ext': 'mp4',
            'duration': 816,
            'view_count': int,
            'thumbnail': r're:^https://i\.ytimg\.com/',
            'age_limit': 0,
            'availability': 'public',
            'categories': list,
            'channel_follower_count': int,
            'channel_id': 'UCCzVNbWZfYpBfyofCCUD_0w',
            'channel_is_verified': bool,
            'channel_url': r're:^https://www\.youtube\.com/',
            'comment_count': int,
            'description': str,
            'heatmap': 'count:100',
            'live_status': str,
            'playable_in_embed': bool,
            'uploader': str,
            'uploader_id': str,
            'uploader_url': r're:^https://www\.youtube\.com/',
        },
    }]

    _MP4_TYPES = ('tiny', 'lowest', 'low', 'medium', 'high', 'full_hd', 'quad_hd', 'ultra_hd')

    def _extract_formats(self, player_urls, video_id):
        formats = []
        quality = qualities(self._MP4_TYPES)
        for player_url in traverse_obj(player_urls, lambda _, v: url_or_none(v['url'])):
            url = player_url['url']
            format_type = player_url.get('type')
            if format_type in ('hls', 'hls_live', 'live_ondemand_hls', 'live_playback_hls'):
                formats.extend(self._extract_m3u8_formats(url, video_id, m3u8_id='hls', fatal=False))
            elif format_type in ('dash', 'dash_live', 'live_playback_dash'):
                formats.extend(self._extract_mpd_formats(url, video_id, mpd_id='dash', fatal=False))
            elif format_type in self._MP4_TYPES:
                formats.append({
                    'url': url,
                    'ext': 'mp4',
                    'format_id': format_type,
                    'quality': quality(format_type),
                })
            else:
                self.report_warning(f'Unknown format type: {format_type!r}')
        return formats

    def _real_extract(self, url):
        user, post_id = self._match_valid_url(url).group('user', 'post_id')
        post = self._download_json(
            f'https://api.boosty.to/v1/blog/{user}/post/{post_id}', post_id,
            note='Downloading post data', errnote='Unable to download post data')

        post_title = post.get('title')
        if not post_title:
            self.report_warning('Unable to extract post title. Falling back to parsing html page')
            webpage = self._download_webpage(url, video_id=post_id)
            post_title = self._og_search_title(webpage, default=None) or self._html_extract_title(webpage)

        common_metadata = {
            'title': post_title,
            **traverse_obj(post, {
                'channel': ('user', 'name', {str}),
                'channel_id': ('user', 'id', {str_or_none}),
                'timestamp': ('createdAt', {int_or_none}),
                'release_timestamp': ('publishTime', {int_or_none}),
                'modified_timestamp': ('updatedAt', {int_or_none}),
                'tags': ('tags', ..., 'title', {str}),
                'like_count': ('count', 'likes', {int_or_none}),
            }),
        }
        entries = []
        for item in traverse_obj(post, ('data', ..., {dict})):
            item_type = item.get('type')
            if item_type == 'video' and url_or_none(item.get('url')):
                entries.append(self.url_result(item['url'], YoutubeIE))
            elif item_type == 'ok_video':
                video_id = item.get('id') or post_id
                entries.append({
                    'id': video_id,
                    'formats': self._extract_formats(item.get('playerUrls'), video_id),
                    **common_metadata,
                    **traverse_obj(item, {
                        'title': ('title', {str}),
                        'duration': ('duration', {int_or_none}),
                        'view_count': ('viewsCounter', {int_or_none}),
                        'thumbnail': (('previewUrl', 'defaultPreview'), {url_or_none}),
                    }, get_all=False)})

        if not entries:
            raise ExtractorError('No videos found', expected=True)
        if len(entries) == 1:
            return entries[0]
        return self.playlist_result(entries, post_id, post_title, **common_metadata)
@ -33,10 +33,7 @@ def _real_extract(self, url):
        webpage = self._download_webpage(
            'http://embed.crooksandliars.com/embed/%s' % video_id, video_id)

        manifest = self._parse_json(
            self._search_regex(
                r'var\s+manifest\s*=\s*({.+?})\n', webpage, 'manifest JSON'),
            video_id)
        manifest = self._search_json(r'var\s+manifest\s*=', webpage, 'manifest JSON', video_id)

        quality = qualities(('webm_low', 'mp4_low', 'webm_high', 'mp4_high'))

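The replacement collapses the _parse_json/_search_regex pair into a single _search_json call, which finds the start pattern and then decodes the JSON object that follows it. A rough stand-in using only the standard library (the sample page string is invented; the regex is the one removed above):

import json
import re

# Invented embed page; the real extractor downloads it from embed.crooksandliars.com.
webpage = '<script>var manifest = {"video": [{"url": "https://example.invalid/v.mp4"}]}\n</script>'

# Approximately what self._search_json(r'var\s+manifest\s*=', webpage, 'manifest JSON', video_id)
# resolves to here: locate the assignment, then parse the object literal that follows it.
manifest = json.loads(re.search(r'var\s+manifest\s*=\s*({.+?})\n', webpage).group(1))
assert manifest['video'][0]['url'].endswith('.mp4')
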
@ -1,25 +1,29 @@
from .common import InfoExtractor
from .nexx import NexxIE
from ..utils import (
    int_or_none,
    str_or_none,
)


class FunkIE(InfoExtractor):
    _VALID_URL = r'https?://(?:www\.|origin\.)?funk\.net/(?:channel|playlist)/[^/]+/(?P<display_id>[0-9a-z-]+)-(?P<id>\d+)'
    _TESTS = [{
        'url': 'https://www.funk.net/channel/ba-793/die-lustigsten-instrumente-aus-dem-internet-teil-2-1155821',
        'md5': '8dd9d9ab59b4aa4173b3197f2ea48e81',
        'md5': '8610449476156f338761a75391b0017d',
        'info_dict': {
            'id': '1155821',
            'ext': 'mp4',
            'title': 'Die LUSTIGSTEN INSTRUMENTE aus dem Internet - Teil 2',
            'description': 'md5:a691d0413ef4835588c5b03ded670c1f',
            'description': 'md5:2a03b67596eda0d1b5125c299f45e953',
            'timestamp': 1514507395,
            'upload_date': '20171229',
            'duration': 426.0,
            'cast': ['United Creators PMB GmbH'],
            'thumbnail': 'https://assets.nexx.cloud/media/75/56/79/3YKUSJN1LACN0CRxL.jpg',
            'display_id': 'die-lustigsten-instrumente-aus-dem-internet-teil-2',
            'alt_title': 'Die LUSTIGSTEN INSTRUMENTE aus dem Internet Teil 2',
            'season_number': 0,
            'season': 'Season 0',
            'episode_number': 0,
            'episode': 'Episode 0',
        },
    }, {
        'url': 'https://www.funk.net/playlist/neuesteVideos/kameras-auf-dem-fusion-festival-1618699',
        'only_matching': True,

@ -27,18 +31,10 @@ class FunkIE(InfoExtractor):

    def _real_extract(self, url):
        display_id, nexx_id = self._match_valid_url(url).groups()
        video = self._download_json(
            'https://www.funk.net/api/v4.0/videos/' + nexx_id, nexx_id)
        return {
            '_type': 'url_transparent',
            'url': 'nexx:741:' + nexx_id,
            'url': f'nexx:741:{nexx_id}',
            'ie_key': NexxIE.ie_key(),
            'id': nexx_id,
            'title': video.get('title'),
            'description': video.get('description'),
            'duration': int_or_none(video.get('duration')),
            'channel_id': str_or_none(video.get('channelId')),
            'display_id': display_id,
            'tags': video.get('tags'),
            'thumbnail': video.get('imageUrlLandscape'),
        }

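With the per-field metadata lookups gone, FunkIE hands playback off to NexxIE through a 'url_transparent' result: yt-dlp resolves the nexx: URL with that extractor and, as far as the merge semantics go, any non-empty fields set here take precedence over what NexxIE returns. A sketch of the resulting dict for the first test above (the ie_key value is assumed):

# Shape of the 'url_transparent' result built in _real_extract (values from the test above).
info = {
    '_type': 'url_transparent',  # resolve 'url' with another extractor, then merge fields
    'url': 'nexx:741:1155821',   # 741 is assumed to be the Nexx domain id used for funk.net
    'ie_key': 'Nexx',            # assumed result of NexxIE.ie_key()
    'id': '1155821',
}
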
@ -13,7 +13,7 @@ class LeFigaroVideoEmbedIE(InfoExtractor):

    _TESTS = [{
        'url': 'https://video.lefigaro.fr/embed/figaro/video/les-francais-ne-veulent-ils-plus-travailler-suivez-en-direct-le-club-le-figaro-idees/',
        'md5': 'e94de44cd80818084352fcf8de1ce82c',
        'md5': 'a0c3069b7e4c4526abf0053a7713f56f',
        'info_dict': {
            'id': 'g9j7Eovo',
            'title': 'Les Français ne veulent-ils plus travailler ? Retrouvez Le Club Le Figaro Idées',

@ -26,7 +26,7 @@ class LeFigaroVideoEmbedIE(InfoExtractor):
        },
    }, {
        'url': 'https://video.lefigaro.fr/embed/figaro/video/intelligence-artificielle-faut-il-sen-mefier/',
        'md5': '0b3f10332b812034b3a3eda1ef877c5f',
        'md5': '319c662943dd777bab835cae1e2d73a5',
        'info_dict': {
            'id': 'LeAgybyc',
            'title': 'Intelligence artificielle : faut-il s’en méfier ?',

@ -41,7 +41,7 @@ class LeFigaroVideoEmbedIE(InfoExtractor):

    _WEBPAGE_TESTS = [{
        'url': 'https://video.lefigaro.fr/figaro/video/suivez-en-direct-le-club-le-figaro-international-avec-philippe-gelie-9/',
        'md5': '3972ddf2d5f8b98699f191687258e2f9',
        'md5': '6289f9489efb969e38245f31721596fe',
        'info_dict': {
            'id': 'QChnbPYA',
            'title': 'Où en est le couple franco-allemand ? Retrouvez Le Club Le Figaro International',

@ -55,7 +55,7 @@ class LeFigaroVideoEmbedIE(InfoExtractor):
        },
    }, {
        'url': 'https://video.lefigaro.fr/figaro/video/la-philosophe-nathalie-sarthou-lajus-est-linvitee-du-figaro-live/',
        'md5': '3ac0a0769546ee6be41ab52caea5d9a9',
        'md5': 'f6df814cae53e85937621599d2967520',
        'info_dict': {
            'id': 'QJzqoNbf',
            'title': 'La philosophe Nathalie Sarthou-Lajus est l’invitée du Figaro Live',

@ -73,7 +73,8 @@ def _real_extract(self, url):
        display_id = self._match_id(url)
        webpage = self._download_webpage(url, display_id)

        player_data = self._search_nextjs_data(webpage, display_id)['props']['pageProps']['pageData']['playerData']
        player_data = self._search_nextjs_data(
            webpage, display_id)['props']['pageProps']['initialProps']['pageData']['playerData']

        return self.url_result(
            f'jwplatform:{player_data["videoId"]}', title=player_data.get('title'),

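The playerData path gains an extra 'initialProps' level. _search_nextjs_data pulls the __NEXT_DATA__ JSON blob out of the page; a rough, self-contained stand-in for what the updated lookup does (the page snippet and its values are invented):

import json
import re

webpage = (
    '<script id="__NEXT_DATA__" type="application/json">'
    '{"props": {"pageProps": {"initialProps": {"pageData": {"playerData":'
    ' {"videoId": "g9j7Eovo", "title": "Example title"}}}}}}'
    '</script>'
)
next_data = json.loads(re.search(
    r'<script id="__NEXT_DATA__"[^>]*>(.+?)</script>', webpage).group(1))
player_data = next_data['props']['pageProps']['initialProps']['pageData']['playerData']
assert f'jwplatform:{player_data["videoId"]}' == 'jwplatform:g9j7Eovo'
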
@ -28,12 +28,24 @@ class MagellanTVIE(InfoExtractor):
            'tags': ['Investigation', 'True Crime', 'Justice', 'Europe'],
        },
        'params': {'skip_download': 'm3u8'},
    }, {
        'url': 'https://www.magellantv.com/watch/celebration-nation',
        'info_dict': {
            'id': 'celebration-nation',
            'ext': 'mp4',
            'tags': ['Art & Culture', 'Human Interest', 'Anthropology', 'China', 'History'],
            'duration': 2640.0,
            'title': 'Ancestors',
        },
        'params': {'skip_download': 'm3u8'},
    }]

    def _real_extract(self, url):
        video_id = self._match_id(url)
        webpage = self._download_webpage(url, video_id)
        data = self._search_nextjs_data(webpage, video_id)['props']['pageProps']['reactContext']['video']['detail']
        data = traverse_obj(self._search_nextjs_data(webpage, video_id), (
            'props', 'pageProps', 'reactContext',
            (('video', 'detail'), ('series', 'currentEpisode')), {dict}), get_all=False)
        formats, subtitles = self._extract_m3u8_formats_and_subtitles(data['jwpVideoUrl'], video_id)

        return {

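The rewritten lookup handles both page layouts at once: the nested tuple (('video', 'detail'), ('series', 'currentEpisode')) makes traverse_obj try each branch, {dict} keeps only dict results, and get_all=False returns the first hit instead of a list. A small sketch with two invented reactContext shapes:

from yt_dlp.utils import traverse_obj

movie_ctx = {'props': {'pageProps': {'reactContext': {
    'video': {'detail': {'jwpVideoUrl': 'https://example.invalid/movie.m3u8'}}}}}}
series_ctx = {'props': {'pageProps': {'reactContext': {
    'series': {'currentEpisode': {'jwpVideoUrl': 'https://example.invalid/episode.m3u8'}}}}}}

path = ('props', 'pageProps', 'reactContext',
        (('video', 'detail'), ('series', 'currentEpisode')), {dict})
assert traverse_obj(movie_ctx, path, get_all=False)['jwpVideoUrl'].endswith('movie.m3u8')
assert traverse_obj(series_ctx, path, get_all=False)['jwpVideoUrl'].endswith('episode.m3u8')
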
@ -32,6 +32,7 @@ class NYTimesBaseIE(InfoExtractor):
                renderedRepresentation
            }
            duration
            firstPublished
            promotionalHeadline
            promotionalMedia {
                ... on Image {

@ -124,6 +125,7 @@ def _extract_video(self, media_id):
            'id': media_id,
            'title': data.get('promotionalHeadline'),
            'description': data.get('summary'),
            'timestamp': parse_iso8601(data.get('firstPublished')),
            'duration': float_or_none(data.get('duration'), scale=1000),
            'creator': ', '.join(traverse_obj(data, (  # TODO: change to 'creators'
                'bylines', ..., 'renderedRepresentation', {lambda x: remove_start(x, 'By ')}))),

@ -145,8 +147,8 @@ class NYTimesIE(NYTimesBaseIE):
            'ext': 'mp4',
            'title': 'Verbatim: What Is a Photocopier?',
            'description': 'md5:93603dada88ddbda9395632fdc5da260',
            'timestamp': 1398631707,  # FIXME
            'upload_date': '20140427',  # FIXME
            'timestamp': 1398646132,
            'upload_date': '20140428',
            'creator': 'Brett Weiner',
            'thumbnail': r're:https?://\w+\.nyt.com/images/.+\.jpg',
            'duration': 419,

@ -310,6 +312,8 @@ class NYTimesCookingIE(NYTimesBaseIE):
            'ext': 'mp4',
            'title': 'How to Make Mac and Cheese',
            'description': 'md5:b8f2f33ec1fb7523b21367147c9594f1',
            'timestamp': 1522950315,
            'upload_date': '20180405',
            'duration': 9.51,
            'creator': 'Alison Roman',
            'thumbnail': r're:https?://\w+\.nyt.com/images/.*\.jpg',

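Two small conversions carry the new fields: duration is scaled to seconds with float_or_none(..., scale=1000), and the GraphQL bylines are joined into a creator string after stripping the 'By ' prefix. A quick sketch (the data dict is an invented stand-in for the GraphQL response):

from yt_dlp.utils import float_or_none, remove_start, traverse_obj

data = {
    'duration': 419000,  # assumed to be milliseconds, hence scale=1000 above
    'bylines': [{'renderedRepresentation': 'By Brett Weiner'}],
}
assert float_or_none(data.get('duration'), scale=1000) == 419.0
assert ', '.join(traverse_obj(data, (
    'bylines', ..., 'renderedRepresentation', {lambda x: remove_start(x, 'By ')}))) == 'Brett Weiner'
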
File diff suppressed because it is too large
@ -21,7 +21,7 @@

class TVPIE(InfoExtractor):
    IE_NAME = 'tvp'
    IE_DESC = 'Telewizja Polska'
    _VALID_URL = r'https?://(?:[^/]+\.)?(?:tvp(?:parlament)?\.(?:pl|info)|tvpworld\.com|swipeto\.pl)/(?:(?!\d+/)[^/]+/)*(?P<id>\d+)'
    _VALID_URL = r'https?://(?:[^/]+\.)?(?:tvp(?:parlament)?\.(?:pl|info)|tvpworld\.com|swipeto\.pl)/(?:(?!\d+/)[^/]+/)*(?P<id>\d+)(?:[/?#]|$)'

    _TESTS = [{
        # TVPlayer 2 in js wrapper

@ -514,7 +514,7 @@ def _parse_video(self, video, with_url=True):

class TVPVODVideoIE(TVPVODBaseIE):
    IE_NAME = 'tvp:vod'
    _VALID_URL = r'https?://vod\.tvp\.pl/[a-z\d-]+,\d+/[a-z\d-]+(?<!-odcinki)(?:-odcinki,\d+/odcinek-\d+,S\d+E\d+)?,(?P<id>\d+)(?:\?[^#]+)?(?:#.+)?$'
    _VALID_URL = r'https?://vod\.tvp\.pl/(?P<category>[a-z\d-]+,\d+)/[a-z\d-]+(?<!-odcinki)(?:-odcinki,\d+/odcinek-\d+,S\d+E\d+)?,(?P<id>\d+)/?(?:[?#]|$)'

    _TESTS = [{
        'url': 'https://vod.tvp.pl/dla-dzieci,24/laboratorium-alchemika-odcinki,309338/odcinek-24,S01E24,311357',

@ -560,12 +560,23 @@ class TVPVODVideoIE(TVPVODBaseIE):
            'thumbnail': 're:https?://.+',
        },
        'params': {'skip_download': 'm3u8'},
    }, {
        'url': 'https://vod.tvp.pl/live,1/tvp-world,399731',
        'info_dict': {
            'id': '399731',
            'ext': 'mp4',
            'title': r're:TVP WORLD \d{4}-\d{2}-\d{2} \d{2}:\d{2}',
            'live_status': 'is_live',
            'thumbnail': 're:https?://.+',
        },
    }]

    def _real_extract(self, url):
        video_id = self._match_id(url)
        category, video_id = self._match_valid_url(url).group('category', 'id')

        info_dict = self._parse_video(self._call_api(f'vods/{video_id}', video_id), with_url=False)
        is_live = category == 'live,1'
        entity = 'lives' if is_live else 'vods'
        info_dict = self._parse_video(self._call_api(f'{entity}/{video_id}', video_id), with_url=False)

        playlist = self._call_api(f'{video_id}/videos/playlist', video_id, query={'videoType': 'MOVIE'})

@ -582,6 +593,8 @@ def _real_extract(self, url):
                'ext': 'ttml',
            })

        info_dict['is_live'] = is_live

        return info_dict

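The reworked _VALID_URL captures the first path segment as 'category', which _real_extract then uses to pick between the lives and vods API endpoints (a 'live,1' category marks a live channel). A small check of that matching against the two test URLs above, using the pattern verbatim:

import re

VALID_URL = r'https?://vod\.tvp\.pl/(?P<category>[a-z\d-]+,\d+)/[a-z\d-]+(?<!-odcinki)(?:-odcinki,\d+/odcinek-\d+,S\d+E\d+)?,(?P<id>\d+)/?(?:[?#]|$)'

live = re.match(VALID_URL, 'https://vod.tvp.pl/live,1/tvp-world,399731')
assert live.group('category') == 'live,1' and live.group('id') == '399731'

vod = re.match(VALID_URL, 'https://vod.tvp.pl/dla-dzieci,24/laboratorium-alchemika-odcinki,309338/odcinek-24,S01E24,311357')
assert vod.group('category') == 'dla-dzieci,24' and vod.group('id') == '311357'
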