Mirror of https://github.com/yt-dlp/yt-dlp.git (synced 2024-11-30 12:01:28 +00:00)
[devscripts/cli_to_api] Add script
Commit 46f1370e9a (parent 69a40e4a7f)
devscripts/cli_to_api.py (new file, 48 lines)
@@ -0,0 +1,48 @@
# Allow direct execution
import os
import sys

sys.path.insert(0, os.path.dirname(os.path.dirname(os.path.abspath(__file__))))

import yt_dlp
import yt_dlp.options

create_parser = yt_dlp.options.create_parser


def parse_patched_options(opts):
    patched_parser = create_parser()
    patched_parser.defaults.update({
        'ignoreerrors': False,
        'retries': 0,
        'fragment_retries': 0,
        'extract_flat': False,
        'concat_playlist': 'never',
    })
    yt_dlp.options.__dict__['create_parser'] = lambda: patched_parser
    try:
        return yt_dlp.parse_options(opts)
    finally:
        yt_dlp.options.__dict__['create_parser'] = create_parser


default_opts = parse_patched_options([]).ydl_opts


def cli_to_api(opts, cli_defaults=False):
    opts = (yt_dlp.parse_options if cli_defaults else parse_patched_options)(opts).ydl_opts

    diff = {k: v for k, v in opts.items() if default_opts[k] != v}
    if 'postprocessors' in diff:
        diff['postprocessors'] = [pp for pp in diff['postprocessors']
                                  if pp not in default_opts['postprocessors']]
    return diff


if __name__ == '__main__':
    from pprint import pprint

    print('\nThe arguments passed translate to:\n')
    pprint(cli_to_api(sys.argv[1:]))
    print('\nCombining these with the CLI defaults gives:\n')
    pprint(cli_to_api(sys.argv[1:], True))
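A hedged usage sketch (not part of the commit): run from the repository root, the helper can also be imported and its result fed straight to the embedding API. The argument list, URL, and printed dict are illustrative.

# Sketch only: assumes the working directory is the yt-dlp checkout, so the
# sys.path tweak below makes devscripts/cli_to_api.py importable as a module.
import sys

sys.path.insert(0, 'devscripts')
from cli_to_api import cli_to_api  # the helper added in this commit

import yt_dlp

# Translate a CLI argument list into the equivalent YoutubeDL options dict
ydl_opts = cli_to_api(['-f', 'bestvideo+bestaudio', '--embed-metadata'])
print(ydl_opts)  # e.g. {'format': 'bestvideo+bestaudio', 'postprocessors': [...]}

# The translated dict is a normal params dict for the embedding API
with yt_dlp.YoutubeDL(ydl_opts) as ydl:
    ydl.download(['https://www.youtube.com/watch?v=BaW_jenozKc'])  # illustrative URL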
yt_dlp/YoutubeDL.py
@@ -280,7 +280,7 @@ class YoutubeDL:
                        subtitles. The language can be prefixed with a "-" to
                        exclude it from the requested languages, e.g. ['all', '-live_chat']
     keepvideo:         Keep the video file after post-processing
-    daterange:         A DateRange object, download only if the upload_date is in the range.
+    daterange:         A utils.DateRange object, download only if the upload_date is in the range.
     skip_download:     Skip the actual download of the video file
     cachedir:          Location of the cache files in the filesystem.
                        False to disable filesystem cache.
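The docstring now points at utils.DateRange explicitly; a minimal sketch (mine, not from the commit) of how that option is supplied:

# Sketch only: download only videos uploaded in 2023, via the `daterange`
# option documented above. Dates use YYYYMMDD, as accepted by DateRange.
import yt_dlp
from yt_dlp.utils import DateRange

ydl_opts = {
    'daterange': DateRange('20230101', '20231231'),  # upload_date must fall inside this range
}
with yt_dlp.YoutubeDL(ydl_opts) as ydl:
    ydl.download(['https://example.com/some-channel'])  # placeholder URL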
@@ -329,13 +329,13 @@ class YoutubeDL:
                        'auto' for elaborate guessing
     encoding:          Use this encoding instead of the system-specified.
     extract_flat:      Whether to resolve and process url_results further
-                       * False: Always process (default)
+                       * False: Always process. Default for API
                        * True: Never process
                        * 'in_playlist': Do not process inside playlist/multi_video
                        * 'discard': Always process, but don't return the result
                          from inside playlist/multi_video
                        * 'discard_in_playlist': Same as "discard", but only for
-                         playlists (not multi_video)
+                         playlists (not multi_video). Default for CLI
     wait_for_video:    If given, wait for scheduled streams to become available.
                        The value should be a tuple containing the range
                        (min_secs, max_secs) to wait between retries
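A hedged illustration of the defaults documented above: the API keeps extract_flat=False (process everything) unless told otherwise, while the CLI's effective default is 'discard_in_playlist'. Passing 'in_playlist' keeps playlist entries as unresolved stubs:

# Sketch only: list playlist entries without resolving each one.
import yt_dlp

with yt_dlp.YoutubeDL({'extract_flat': 'in_playlist', 'quiet': True}) as ydl:
    info = ydl.extract_info('https://example.com/some-playlist', download=False)  # placeholder URL
    for entry in info.get('entries') or []:
        # With 'in_playlist', entries are lightweight url_results, not full infos
        print(entry.get('title'), entry.get('url'))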
@@ -472,7 +472,7 @@ class YoutubeDL:
                        can also be used

     The following options are used by the extractors:
-    extractor_retries: Number of times to retry for known errors
+    extractor_retries: Number of times to retry for known errors (default: 3)
     dynamic_mpd:       Whether to process dynamic DASH manifests (default: True)
     hls_split_discontinuity: Split HLS playlists to different formats at
                        discontinuities such as ad breaks (default: False)
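For completeness, a small sketch (assumptions mine) of overriding the extractor options documented in this hunk rather than relying on their defaults:

# Sketch only: bump retries for flaky extractors and disable dynamic DASH manifests.
import yt_dlp

ydl = yt_dlp.YoutubeDL({
    'extractor_retries': 5,           # default is 3, per the docstring above
    'dynamic_mpd': False,             # default: True
    'hls_split_discontinuity': True,  # default: False
})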
yt_dlp/downloader/common.py
@@ -51,8 +51,9 @@ class FileDownloader:
     ratelimit:          Download speed limit, in bytes/sec.
     continuedl:         Attempt to continue downloads if possible
     throttledratelimit: Assume the download is being throttled below this speed (bytes/sec)
-    retries:            Number of times to retry for HTTP error 5xx
-    file_access_retries:   Number of times to retry on file access error
+    retries:            Number of times to retry for expected network errors.
+                        Default is 0 for API, but 10 for CLI
+    file_access_retries:   Number of times to retry on file access error (default: 3)
     buffersize:         Size of download buffer in bytes.
     noresizebuffer:     Do not automatically resize the download buffer.
     continuedl:         Try to continue downloads if possible.
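Since the API now starts from 0 retries while the CLI defaults to 10 (per the docstring change above and the fragment.py hunk below), a hedged sketch of mimicking the CLI behaviour when embedding:

# Sketch only: reproduce the CLI's retry defaults from the Python API.
import yt_dlp

cli_like_retries = {
    'retries': 10,            # CLI default; API default is 0
    'fragment_retries': 10,   # likewise (see the fragment.py hunk below)
    'file_access_retries': 3,
}
with yt_dlp.YoutubeDL(cli_like_retries) as ydl:
    ydl.download(['https://example.com/video'])  # placeholder URL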
@@ -225,7 +226,7 @@ def error_callback(err, count, retries, *, fd):
                 sleep_func=fd.params.get('retry_sleep_functions', {}).get('file_access'))

         def wrapper(self, func, *args, **kwargs):
-            for retry in RetryManager(self.params.get('file_access_retries'), error_callback, fd=self):
+            for retry in RetryManager(self.params.get('file_access_retries', 3), error_callback, fd=self):
                 try:
                     return func(self, *args, **kwargs)
                 except OSError as err:
yt_dlp/downloader/fragment.py
@@ -34,8 +34,8 @@ class FragmentFD(FileDownloader):

     Available options:

-    fragment_retries:   Number of times to retry a fragment for HTTP error (DASH
-                        and hlsnative only)
+    fragment_retries:   Number of times to retry a fragment for HTTP error
+                        (DASH and hlsnative only). Default is 0 for API, but 10 for CLI
     skip_unavailable_fragments:
                         Skip unavailable fragments (DASH and hlsnative only)
     keep_fragments:     Keep downloaded fragments on disk after downloading is
yt_dlp/utils.py
@@ -60,6 +60,8 @@
 from ..dependencies import brotli, certifi, websockets, xattr
 from ..socks import ProxyType, sockssocket

+__name__ = __name__.rsplit('.', 1)[0]  # Pretend to be the parent module
+
 # This is not clearly defined otherwise
 compiled_regex_type = type(re.compile(''))
@@ -1957,8 +1959,8 @@ def __contains__(self, date):
         date = date_from_str(date)
         return self.start <= date <= self.end

-    def __str__(self):
-        return f'{self.start.isoformat()} - {self.end.isoformat()}'
+    def __repr__(self):
+        return f'{__name__}.{type(self).__name__}({self.start.isoformat()!r}, {self.end.isoformat()!r})'

     def __eq__(self, other):
         return (isinstance(other, DateRange)
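A quick check of the new behaviour (my sketch; the exact module prefix in the output depends on the __name__ tweak in the earlier utils.py hunk):

# Sketch only: __str__ was replaced by __repr__, so str() now falls back to repr().
from yt_dlp.utils import DateRange

dr = DateRange('20230101', '20231231')
print(repr(dr))             # e.g. yt_dlp.utils.DateRange('2023-01-01', '2023-12-31')
print(str(dr) == repr(dr))  # True: with no __str__, object.__str__ delegates to __repr__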