2022-04-17 17:18:50 +00:00
|
|
|
import enum
|
2024-07-01 22:51:27 +00:00
|
|
|
import functools
|
2023-01-01 17:16:25 +00:00
|
|
|
import json
|
2023-07-05 20:16:28 +00:00
|
|
|
import os
|
2018-03-24 09:29:03 +00:00
|
|
|
import re
|
2015-01-24 00:38:48 +00:00
|
|
|
import subprocess
|
2016-02-19 18:29:24 +00:00
|
|
|
import sys
|
2023-07-05 20:16:28 +00:00
|
|
|
import tempfile
|
2018-03-24 09:29:03 +00:00
|
|
|
import time
|
2023-01-01 17:16:25 +00:00
|
|
|
import uuid
|
2021-02-08 16:46:01 +00:00
|
|
|
|
2021-09-21 23:57:07 +00:00
|
|
|
from .fragment import FragmentFD
|
2023-07-09 07:53:02 +00:00
|
|
|
from ..networking import Request
|
2022-04-11 22:32:57 +00:00
|
|
|
from ..postprocessor.ffmpeg import EXT_TO_OUT_FORMATS, FFmpegPostProcessor
|
2015-01-24 00:38:48 +00:00
|
|
|
from ..utils import (
|
2022-04-11 22:32:57 +00:00
|
|
|
Popen,
|
2022-08-01 20:13:18 +00:00
|
|
|
RetryManager,
|
2022-04-11 22:32:57 +00:00
|
|
|
_configuration_args,
|
|
|
|
check_executable,
|
2022-03-25 07:08:33 +00:00
|
|
|
classproperty,
|
2022-04-11 22:32:57 +00:00
|
|
|
cli_bool_option,
|
2015-09-04 21:06:28 +00:00
|
|
|
cli_option,
|
|
|
|
cli_valueless_option,
|
2022-01-23 19:45:44 +00:00
|
|
|
determine_ext,
|
2015-04-25 22:33:43 +00:00
|
|
|
encodeArgument,
|
2023-01-01 17:16:25 +00:00
|
|
|
find_available_port,
|
2022-01-23 19:45:44 +00:00
|
|
|
remove_end,
|
2022-04-27 16:22:57 +00:00
|
|
|
traverse_obj,
|
2015-01-24 00:38:48 +00:00
|
|
|
)
|
|
|
|
|
|
|
|
|
2022-04-17 17:18:50 +00:00
|
|
|
class Features(enum.Enum):
    """Optional capabilities that an external downloader can advertise
    through its ``SUPPORTED_FEATURES`` tuple (checked in ``ExternalFD.supports``)."""
    TO_STDOUT = enum.auto()  # can write the downloaded data to stdout ('-')
    MULTIPLE_FORMATS = enum.auto()  # can handle '+'-joined (merged) protocols
|
|
|
|
|
|
|
|
|
2021-09-21 23:57:07 +00:00
|
|
|
class ExternalFD(FragmentFD):
    """Base class for downloaders that delegate the actual transfer to an
    external executable (curl, wget, aria2c, ffmpeg, ...).

    Subclasses either implement ``_make_cmd`` (to build the command line)
    or override ``_call_downloader`` entirely.
    """
    SUPPORTED_PROTOCOLS = ('http', 'https', 'ftp', 'ftps')
    SUPPORTED_FEATURES = ()  # tuple of Features the tool supports
    _CAPTURE_STDERR = True  # if False, the tool's stderr is passed through untouched

    def real_download(self, filename, info_dict):
        """Run the external tool, then report progress/rename on success.

        Returns True on success, False on a non-zero exit code.
        """
        self.report_destination(filename)
        tmpfilename = self.temp_name(filename)
        # Reset before each download; _write_cookies may set it to a temp path
        self._cookies_tempfile = None

        try:
            started = time.time()
            retval = self._call_downloader(tmpfilename, info_dict)
        except KeyboardInterrupt:
            if not info_dict.get('is_live'):
                raise
            # Live stream downloading cancellation should be considered as
            # correct and expected termination thus all postprocessing
            # should take place
            retval = 0
            self.to_screen(f'[{self.get_basename()}] Interrupted by user')
        finally:
            # Always clean up the temporary cookies file, even on error
            if self._cookies_tempfile:
                self.try_remove(self._cookies_tempfile)

        if retval == 0:
            status = {
                'filename': filename,
                'status': 'finished',
                'elapsed': time.time() - started,
            }
            if filename != '-':
                # Not writing to stdout: measure the file and move it into place
                fsize = os.path.getsize(tmpfilename)
                self.try_rename(tmpfilename, filename)
                status.update({
                    'downloaded_bytes': fsize,
                    'total_bytes': fsize,
                })
            self._hook_progress(status, info_dict)
            return True
        else:
            self.to_stderr('\n')
            self.report_error(f'{self.get_basename()} exited with code {retval}')
            return False

    @classmethod
    def get_basename(cls):
        """Downloader name derived from the class name, e.g. CurlFD -> 'curl'."""
        return cls.__name__[:-2].lower()

    @classproperty
    def EXE_NAME(cls):
        # Executable name; subclasses override when it differs from the basename
        return cls.get_basename()

    @functools.cached_property
    def exe(self):
        # Path/name of the executable to invoke; available() may rebind this
        # at class level to the resolved path
        return self.EXE_NAME

    @classmethod
    def available(cls, path=None):
        """Return the resolved executable path if the tool is usable, else False.

        Side effect: caches the resolved path on ``cls.exe``.
        """
        path = check_executable(
            cls.EXE_NAME if path in (None, cls.get_basename()) else path,
            [cls.AVAILABLE_OPT])
        if not path:
            return False
        cls.exe = path
        return path

    @classmethod
    def supports(cls, info_dict):
        """Whether this downloader can handle the given info_dict's protocol/features."""
        return all((
            not info_dict.get('to_stdout') or Features.TO_STDOUT in cls.SUPPORTED_FEATURES,
            '+' not in info_dict['protocol'] or Features.MULTIPLE_FORMATS in cls.SUPPORTED_FEATURES,
            # External tools cannot apply yt-dlp-side AES keys or extra URL params
            not traverse_obj(info_dict, ('hls_aes', ...), 'extra_param_to_segment_url', 'extra_param_to_key_url'),
            all(proto in cls.SUPPORTED_PROTOCOLS for proto in info_dict['protocol'].split('+')),
        ))

    @classmethod
    def can_download(cls, info_dict, path=None):
        """Tool is installed AND supports this download."""
        return cls.available(path) and cls.supports(info_dict)

    def _option(self, command_option, param):
        # Map a yt-dlp param to a "--opt value" pair (empty list if unset)
        return cli_option(self.params, command_option, param)

    def _bool_option(self, command_option, param, true_value='true', false_value='false', separator=None):
        # Map a boolean yt-dlp param to the tool's true/false spelling
        return cli_bool_option(self.params, command_option, param, true_value, false_value, separator)

    def _valueless_option(self, command_option, param, expected_value=True):
        # Emit a bare flag when the param equals expected_value
        return cli_valueless_option(self.params, command_option, param, expected_value)

    def _configuration_args(self, keys=None, *args, **kwargs):
        # User-supplied extra args for this specific external downloader
        return _configuration_args(
            self.get_basename(), self.params.get('external_downloader_args'), self.EXE_NAME,
            keys, *args, **kwargs)

    def _write_cookies(self):
        """Persist the cookiejar to a file the external tool can read.

        Returns the path of the cookies file (the jar's own filename, or a
        temporary file when the jar is in-memory only).
        """
        if not self.ydl.cookiejar.filename:
            tmp_cookies = tempfile.NamedTemporaryFile(suffix='.cookies', delete=False)
            tmp_cookies.close()
            self._cookies_tempfile = tmp_cookies.name
            self.to_screen(f'[download] Writing temporary cookies file to "{self._cookies_tempfile}"')
        # real_download resets _cookies_tempfile; if it's None then save() will write to cookiejar.filename
        self.ydl.cookiejar.save(self._cookies_tempfile)
        return self.ydl.cookiejar.filename or self._cookies_tempfile

    def _call_downloader(self, tmpfilename, info_dict):
        """ Either overwrite this or implement _make_cmd """
        cmd = [encodeArgument(a) for a in self._make_cmd(tmpfilename, info_dict)]

        self._debug_cmd(cmd)

        if 'fragments' not in info_dict:
            # Plain (non-fragmented) download: one invocation, done
            _, stderr, returncode = self._call_process(cmd, info_dict)
            if returncode and stderr:
                self.to_stderr(stderr)
            return returncode

        skip_unavailable_fragments = self.params.get('skip_unavailable_fragments', True)

        # Fragmented download: retry the whole invocation until it succeeds
        # or retries are exhausted
        retry_manager = RetryManager(self.params.get('fragment_retries'), self.report_retry,
                                     frag_index=None, fatal=not skip_unavailable_fragments)
        for retry in retry_manager:
            _, stderr, returncode = self._call_process(cmd, info_dict)
            if not returncode:
                break
            # TODO: Decide whether to retry based on error code
            # https://aria2.github.io/manual/en/html/aria2c.html#exit-status
            if stderr:
                self.to_stderr(stderr)
            retry.error = Exception()
            continue
        if not skip_unavailable_fragments and retry_manager.error:
            return -1

        # Concatenate (and decrypt, if needed) the downloaded fragment files
        # into the final temp file
        decrypt_fragment = self.decrypter(info_dict)
        dest, _ = self.sanitize_open(tmpfilename, 'wb')
        for frag_index, fragment in enumerate(info_dict['fragments']):
            fragment_filename = f'{tmpfilename}-Frag{frag_index}'
            try:
                src, _ = self.sanitize_open(fragment_filename, 'rb')
            except OSError as err:
                if skip_unavailable_fragments and frag_index > 1:
                    self.report_skip_fragment(frag_index, err)
                    continue
                self.report_error(f'Unable to open fragment {frag_index}; {err}')
                return -1
            dest.write(decrypt_fragment(fragment, src.read()))
            src.close()
            if not self.params.get('keep_fragments', False):
                self.try_remove(fragment_filename)
        dest.close()
        # aria2c's fragment URL list file, if one was written (best-effort removal)
        self.try_remove(f'{tmpfilename}.frag.urls')
        return 0

    def _call_process(self, cmd, info_dict):
        """Run the command; return (stdout, stderr, returncode)."""
        return Popen.run(cmd, text=True, stderr=subprocess.PIPE if self._CAPTURE_STDERR else None)
|
2023-01-01 17:16:25 +00:00
|
|
|
|
2015-01-24 00:38:48 +00:00
|
|
|
|
2015-01-24 12:33:45 +00:00
|
|
|
class CurlFD(ExternalFD):
    """External downloader backed by curl."""
    AVAILABLE_OPT = '-V'
    _CAPTURE_STDERR = False  # curl writes the progress to stderr

    def _make_cmd(self, tmpfilename, info_dict):
        """Build the curl command line for a single-URL download."""
        cmd = [self.exe, '--location', '-o', tmpfilename, '--compressed']
        # Pass cookies inline rather than via a cookie file
        cookie_header = self.ydl.cookiejar.get_cookie_header(info_dict['url'])
        if cookie_header:
            cmd += ['--cookie', cookie_header]
        if info_dict.get('http_headers') is not None:
            for key, val in info_dict['http_headers'].items():
                cmd += ['--header', f'{key}: {val}']

        cmd += self._bool_option('--continue-at', 'continuedl', '-', '0')
        cmd += self._valueless_option('--silent', 'noprogress')
        cmd += self._valueless_option('--verbose', 'verbose')
        cmd += self._option('--limit-rate', 'ratelimit')
        retry = self._option('--retry', 'retries')
        if len(retry) == 2:
            # curl has no "infinite" keyword; use the max int32 value instead
            if retry[1] in ('inf', 'infinite'):
                retry[1] = '2147483647'
            cmd += retry
        cmd += self._option('--max-filesize', 'max_filesize')
        cmd += self._option('--interface', 'source_address')
        cmd += self._option('--proxy', 'proxy')
        cmd += self._valueless_option('--insecure', 'nocheckcertificate')
        cmd += self._configuration_args()
        # '--' terminates option parsing so the URL cannot be mistaken for a flag
        cmd += ['--', info_dict['url']]
        return cmd
|
|
|
|
|
|
|
|
|
2015-08-06 17:12:58 +00:00
|
|
|
class AxelFD(ExternalFD):
    """External downloader backed by axel."""
    AVAILABLE_OPT = '-V'

    def _make_cmd(self, tmpfilename, info_dict):
        """Build the axel command line for a single-URL download."""
        cmd = [self.exe, '-o', tmpfilename]
        if info_dict.get('http_headers') is not None:
            for key, val in info_dict['http_headers'].items():
                cmd += ['-H', f'{key}: {val}']
        cookie_header = self.ydl.cookiejar.get_cookie_header(info_dict['url'])
        if cookie_header:
            # Cookies are sent as a plain header; disable redirects so the
            # Cookie header cannot leak to another host
            cmd += ['-H', f'Cookie: {cookie_header}', '--max-redirect=0']
        cmd += self._configuration_args()
        # '--' terminates option parsing so the URL cannot be mistaken for a flag
        cmd += ['--', info_dict['url']]
        return cmd
|
|
|
|
|
|
|
|
|
2015-01-24 00:38:48 +00:00
|
|
|
class WgetFD(ExternalFD):
    """External downloader backed by wget."""
    AVAILABLE_OPT = '--version'

    def _make_cmd(self, tmpfilename, info_dict):
        """Build the wget command line for a single-URL download."""
        cmd = [self.exe, '-O', tmpfilename, '-nv', '--compression=auto']
        # wget reads cookies from a Netscape-format file, so export the jar
        if self.ydl.cookiejar.get_cookie_header(info_dict['url']):
            cmd += ['--load-cookies', self._write_cookies()]
        if info_dict.get('http_headers') is not None:
            for key, val in info_dict['http_headers'].items():
                cmd += ['--header', f'{key}: {val}']
        cmd += self._option('--limit-rate', 'ratelimit')
        retry = self._option('--tries', 'retries')
        if len(retry) == 2:
            # wget spells "infinite retries" as 0
            if retry[1] in ('inf', 'infinite'):
                retry[1] = '0'
            cmd += retry
        cmd += self._option('--bind-address', 'source_address')
        proxy = self.params.get('proxy')
        if proxy:
            # wget takes proxies via wgetrc-style variables, not a CLI flag
            for var in ('http_proxy', 'https_proxy'):
                cmd += ['--execute', f'{var}={proxy}']
        cmd += self._valueless_option('--no-check-certificate', 'nocheckcertificate')
        cmd += self._configuration_args()
        # '--' terminates option parsing so the URL cannot be mistaken for a flag
        cmd += ['--', info_dict['url']]
        return cmd
|
|
|
|
|
|
|
|
|
2015-01-24 12:33:45 +00:00
|
|
|
class Aria2cFD(ExternalFD):
    """External downloader backed by aria2c.

    Supports fragmented (DASH/HLS fragment-URL) downloads via an input file,
    and can optionally report progress through aria2c's JSON-RPC interface
    (currently disabled; see _call_downloader).
    """
    AVAILABLE_OPT = '-v'
    SUPPORTED_PROTOCOLS = ('http', 'https', 'ftp', 'ftps', 'dash_frag_urls', 'm3u8_frag_urls')

    @staticmethod
    def supports_manifest(manifest):
        """Whether aria2c can handle the given HLS manifest text."""
        UNSUPPORTED_FEATURES = [
            r'#EXT-X-BYTERANGE',  # playlists composed of byte ranges of media files [1]
            # 1. https://tools.ietf.org/html/draft-pantos-http-live-streaming-17#section-4.3.2.2
        ]
        check_results = (not re.search(feature, manifest) for feature in UNSUPPORTED_FEATURES)
        return all(check_results)

    @staticmethod
    def _aria2c_filename(fn):
        # Prefix relative paths with './' so aria2c does not strip
        # leading/trailing spaces from them (see comment in _make_cmd)
        return fn if os.path.isabs(fn) else f'.{os.path.sep}{fn}'

    def _call_downloader(self, tmpfilename, info_dict):
        # FIXME: Disabled due to https://github.com/yt-dlp/yt-dlp/issues/5931
        if False and 'no-external-downloader-progress' not in self.params.get('compat_opts', []):
            # Stash RPC connection details for _make_cmd/_call_process
            info_dict['__rpc'] = {
                'port': find_available_port() or 19190,
                'secret': str(uuid.uuid4()),
            }
        return super()._call_downloader(tmpfilename, info_dict)

    def _make_cmd(self, tmpfilename, info_dict):
        """Build the aria2c command line (single URL or fragment input file)."""
        cmd = [self.exe, '-c', '--no-conf',
               '--console-log-level=warn', '--summary-interval=0', '--download-result=hide',
               '--http-accept-gzip=true', '--file-allocation=none', '-x16', '-j16', '-s16']
        if 'fragments' in info_dict:
            cmd += ['--allow-overwrite=true', '--allow-piece-length-change=true']
        else:
            cmd += ['--min-split-size', '1M']

        if self.ydl.cookiejar.get_cookie_header(info_dict['url']):
            cmd += [f'--load-cookies={self._write_cookies()}']
        if info_dict.get('http_headers') is not None:
            for key, val in info_dict['http_headers'].items():
                cmd += ['--header', f'{key}: {val}']
        cmd += self._option('--max-overall-download-limit', 'ratelimit')
        cmd += self._option('--interface', 'source_address')
        cmd += self._option('--all-proxy', 'proxy')
        cmd += self._bool_option('--check-certificate', 'nocheckcertificate', 'false', 'true', '=')
        cmd += self._bool_option('--remote-time', 'updatetime', 'true', 'false', '=')
        cmd += self._bool_option('--show-console-readout', 'noprogress', 'false', 'true', '=')
        cmd += self._configuration_args()

        if '__rpc' in info_dict:
            # Enable the JSON-RPC server so _call_process can poll progress
            cmd += [
                '--enable-rpc',
                f'--rpc-listen-port={info_dict["__rpc"]["port"]}',
                f'--rpc-secret={info_dict["__rpc"]["secret"]}']

        # aria2c strips out spaces from the beginning/end of filenames and paths.
        # We work around this issue by adding a "./" to the beginning of the
        # filename and relative path, and adding a "/" at the end of the path.
        # See: https://github.com/yt-dlp/yt-dlp/issues/276
        # https://github.com/ytdl-org/youtube-dl/issues/20312
        # https://github.com/aria2/aria2/issues/1373
        dn = os.path.dirname(tmpfilename)
        if dn:
            cmd += ['--dir', self._aria2c_filename(dn) + os.path.sep]
        if 'fragments' not in info_dict:
            cmd += ['--out', self._aria2c_filename(os.path.basename(tmpfilename))]
        cmd += ['--auto-file-renaming=false']

        if 'fragments' in info_dict:
            cmd += ['--uri-selector=inorder']
            # Write one "<url>\n\tout=<file>" entry per fragment into an
            # input file that aria2c consumes via -i
            url_list_file = f'{tmpfilename}.frag.urls'
            url_list = []
            for frag_index, fragment in enumerate(info_dict['fragments']):
                fragment_filename = f'{os.path.basename(tmpfilename)}-Frag{frag_index}'
                url_list.append('{}\n\tout={}'.format(fragment['url'], self._aria2c_filename(fragment_filename)))
            stream, _ = self.sanitize_open(url_list_file, 'wb')
            stream.write('\n'.join(url_list).encode())
            stream.close()
            cmd += ['-i', self._aria2c_filename(url_list_file)]
        else:
            cmd += ['--', info_dict['url']]
        return cmd

    def aria2c_rpc(self, rpc_port, rpc_secret, method, params=()):
        """Call a JSON-RPC method on the local aria2c instance and return its result."""
        # Does not actually need to be UUID, just unique
        sanitycheck = str(uuid.uuid4())
        d = json.dumps({
            'jsonrpc': '2.0',
            'id': sanitycheck,
            'method': method,
            'params': [f'token:{rpc_secret}', *params],
        }).encode()
        request = Request(
            f'http://localhost:{rpc_port}/jsonrpc',
            data=d, headers={
                'Content-Type': 'application/json',
                'Content-Length': f'{len(d)}',
            }, proxies={'all': None})  # never proxy the loopback RPC call
        with self.ydl.urlopen(request) as r:
            resp = json.load(r)
        assert resp.get('id') == sanitycheck, 'Something went wrong with RPC server'
        return resp['result']

    def _call_process(self, cmd, info_dict):
        """Run aria2c; when RPC is enabled, poll it to emit progress hooks.

        Returns (stdout, stderr, returncode) like the base implementation.
        """
        if '__rpc' not in info_dict:
            return super()._call_process(cmd, info_dict)

        send_rpc = functools.partial(self.aria2c_rpc, info_dict['__rpc']['port'], info_dict['__rpc']['secret'])
        started = time.time()

        fragmented = 'fragments' in info_dict
        frag_count = len(info_dict['fragments']) if fragmented else 1
        status = {
            'filename': info_dict.get('_filename'),
            'status': 'downloading',
            'elapsed': 0,
            'downloaded_bytes': 0,
            'fragment_count': frag_count if fragmented else None,
            'fragment_index': 0 if fragmented else None,
        }
        self._hook_progress(status, info_dict)

        def get_stat(key, *obj, average=False):
            # Sum (or average) a numeric field across all RPC result entries,
            # ignoring missing/zero values
            val = tuple(filter(None, map(float, traverse_obj(obj, (..., ..., key))))) or [0]
            return sum(val) / (len(val) if average else 1)

        with Popen(cmd, text=True, stdout=subprocess.DEVNULL, stderr=subprocess.PIPE) as p:
            # Add a small sleep so that RPC client can receive response,
            # or the connection stalls infinitely
            time.sleep(0.2)
            retval = p.poll()
            while retval is None:
                # We don't use tellStatus as we won't know the GID without reading stdout
                # Ref: https://aria2.github.io/manual/en/html/aria2c.html#aria2.tellActive
                active = send_rpc('aria2.tellActive')
                completed = send_rpc('aria2.tellStopped', [0, frag_count])

                downloaded = get_stat('totalLength', completed) + get_stat('completedLength', active)
                speed = get_stat('downloadSpeed', active)
                # Estimate total size by extrapolating the average fragment size
                total = frag_count * get_stat('totalLength', active, completed, average=True)
                if total < downloaded:
                    total = None

                status.update({
                    'downloaded_bytes': int(downloaded),
                    'speed': speed,
                    'total_bytes': None if fragmented else total,
                    'total_bytes_estimate': total,
                    'eta': (total - downloaded) / (speed or 1),
                    'fragment_index': min(frag_count, len(completed) + 1) if fragmented else None,
                    'elapsed': time.time() - started,
                })
                self._hook_progress(status, info_dict)

                if not active and len(completed) >= frag_count:
                    # All fragments done: ask aria2c to exit cleanly
                    send_rpc('aria2.shutdown')
                    retval = p.wait()
                    break

                time.sleep(0.1)
                retval = p.poll()

        return '', p.stderr.read(), retval
|
|
|
|
|
2015-06-25 13:48:04 +00:00
|
|
|
|
|
|
|
class HttpieFD(ExternalFD):
    """External downloader backed by httpie (executable name 'http')."""
    AVAILABLE_OPT = '--version'
    EXE_NAME = 'http'

    def _make_cmd(self, tmpfilename, info_dict):
        """Build the httpie command line for a single-URL download."""
        cmd = ['http', '--download', '--output', tmpfilename, info_dict['url']]

        if info_dict.get('http_headers') is not None:
            for key, val in info_dict['http_headers'].items():
                # httpie uses 'Header:value' positional syntax
                cmd += [f'{key}:{val}']

        # httpie 3.1.0+ removes the Cookie header on redirect, so this should be safe for now. [1]
        # If we ever need cookie handling for redirects, we can export the cookiejar into a session. [2]
        # 1: https://github.com/httpie/httpie/security/advisories/GHSA-9w4w-cpc8-h2fq
        # 2: https://httpie.io/docs/cli/sessions
        cookie_header = self.ydl.cookiejar.get_cookie_header(info_dict['url'])
        if cookie_header:
            cmd += [f'Cookie:{cookie_header}']
        return cmd
|
|
|
|
|
2016-02-19 18:29:24 +00:00
|
|
|
|
|
|
|
class FFmpegFD(ExternalFD):
    """External downloader backed by ffmpeg/avconv.

    Unlike the other external downloaders, ffmpeg both downloads and muxes,
    so it can merge multiple formats and write to stdout.
    """
    SUPPORTED_PROTOCOLS = ('http', 'https', 'ftp', 'ftps', 'm3u8', 'm3u8_native', 'rtsp', 'rtmp', 'rtmp_ffmpeg', 'mms', 'http_dash_segments')
    SUPPORTED_FEATURES = (Features.TO_STDOUT, Features.MULTIPLE_FORMATS)

    @classmethod
    def available(cls, path=None):
        # TODO: Fix path for ffmpeg
        # Fixme: This may be wrong when --ffmpeg-location is used
        return FFmpegPostProcessor().available

    def on_process_started(self, proc, stdin):
        """ Override this in subclasses """
        pass

    @classmethod
    def can_merge_formats(cls, info_dict, params):
        """Whether ffmpeg should merge the requested formats directly while downloading."""
        return (
            info_dict.get('requested_formats')
            and info_dict.get('protocol')
            and not params.get('allow_unplayable_formats')
            and 'no-direct-merge' not in params.get('compat_opts', [])
            and cls.can_download(info_dict))

    def _call_downloader(self, tmpfilename, info_dict):
        """Build the full ffmpeg command line and run it to completion.

        Returns ffmpeg's exit code (or False if ffmpeg is unavailable).
        """
        ffpp = FFmpegPostProcessor(downloader=self)
        if not ffpp.available:
            self.report_error('m3u8 download detected but ffmpeg could not be found. Please install')
            return False
        ffpp.check_version()

        args = [ffpp.executable, '-y']

        # Forward yt-dlp's quiet/verbose setting to ffmpeg's loglevel
        for log_level in ('quiet', 'verbose'):
            if self.params.get(log_level, False):
                args += ['-loglevel', log_level]
                break
        if not self.params.get('verbose'):
            args += ['-hide_banner']

        args += traverse_obj(info_dict, ('downloader_options', 'ffmpeg_args', ...))

        # These exists only for compatibility. Extractors should use
        # info_dict['downloader_options']['ffmpeg_args'] instead
        args += info_dict.get('_ffmpeg_args') or []
        seekable = info_dict.get('_seekable')
        if seekable is not None:
            # setting -seekable prevents ffmpeg from guessing if the server
            # supports seeking(by adding the header `Range: bytes=0-`), which
            # can cause problems in some cases
            # https://github.com/ytdl-org/youtube-dl/issues/11800#issuecomment-275037127
            # http://trac.ffmpeg.org/ticket/6125#comment:10
            args += ['-seekable', '1' if seekable else '0']

        env = None
        proxy = self.params.get('proxy')
        if proxy:
            if not re.match(r'[\da-zA-Z]+://', proxy):
                proxy = f'http://{proxy}'

            if proxy.startswith('socks'):
                self.report_warning(
                    f'{self.get_basename()} does not support SOCKS proxies. Downloading is likely to fail. '
                    'Consider adding --hls-prefer-native to your command.')

            # Since December 2015 ffmpeg supports -http_proxy option (see
            # http://git.videolan.org/?p=ffmpeg.git;a=commit;h=b4eb1f29ebddd60c41a2eb39f5af701e38e0d3fd)
            # We could switch to the following code if we are able to detect version properly
            # args += ['-http_proxy', proxy]
            env = os.environ.copy()
            env['HTTP_PROXY'] = proxy
            env['http_proxy'] = proxy

        protocol = info_dict.get('protocol')

        if protocol == 'rtmp':
            # Translate the RTMP-specific info_dict fields into ffmpeg's
            # -rtmp_* input options
            player_url = info_dict.get('player_url')
            page_url = info_dict.get('page_url')
            app = info_dict.get('app')
            play_path = info_dict.get('play_path')
            tc_url = info_dict.get('tc_url')
            flash_version = info_dict.get('flash_version')
            live = info_dict.get('rtmp_live', False)
            conn = info_dict.get('rtmp_conn')
            if player_url is not None:
                args += ['-rtmp_swfverify', player_url]
            if page_url is not None:
                args += ['-rtmp_pageurl', page_url]
            if app is not None:
                args += ['-rtmp_app', app]
            if play_path is not None:
                args += ['-rtmp_playpath', play_path]
            if tc_url is not None:
                args += ['-rtmp_tcurl', tc_url]
            if flash_version is not None:
                args += ['-rtmp_flashver', flash_version]
            if live:
                args += ['-rtmp_live', 'live']
            if isinstance(conn, list):
                for entry in conn:
                    args += ['-rtmp_conn', entry]
            elif isinstance(conn, str):
                args += ['-rtmp_conn', conn]

        start_time, end_time = info_dict.get('section_start') or 0, info_dict.get('section_end')

        # One '-i <url>' (with its per-input options) per selected format
        selected_formats = info_dict.get('requested_formats') or [info_dict]
        for i, fmt in enumerate(selected_formats):
            is_http = re.match(r'https?://', fmt['url'])
            cookies = self.ydl.cookiejar.get_cookies_for_url(fmt['url']) if is_http else []
            if cookies:
                args.extend(['-cookies', ''.join(
                    f'{cookie.name}={cookie.value}; path={cookie.path}; domain={cookie.domain};\r\n'
                    for cookie in cookies)])
            if fmt.get('http_headers') and is_http:
                # Trailing \r\n after each HTTP header is important to prevent warning from ffmpeg/avconv:
                # [http @ 00000000003d2fa0] No trailing CRLF found in HTTP header.
                args.extend(['-headers', ''.join(f'{key}: {val}\r\n' for key, val in fmt['http_headers'].items())])

            if start_time:
                args += ['-ss', str(start_time)]
            if end_time:
                args += ['-t', str(end_time - start_time)]

            args += [*self._configuration_args((f'_i{i + 1}', '_i')), '-i', fmt['url']]

        # Stream-copy unless the user wants accurate keyframe cuts of a section
        if not (start_time or end_time) or not self.params.get('force_keyframes_at_cuts'):
            args += ['-c', 'copy']

        if info_dict.get('requested_formats') or protocol == 'http_dash_segments':
            # Map the wanted stream from each input into the output
            for i, fmt in enumerate(selected_formats):
                stream_number = fmt.get('manifest_stream_number', 0)
                args.extend(['-map', f'{i}:{stream_number}'])

        if self.params.get('test', False):
            args += ['-fs', str(self._TEST_FILE_SIZE)]

        # Choose the output container format based on protocol/extension
        ext = info_dict['ext']
        if protocol in ('m3u8', 'm3u8_native'):
            use_mpegts = (tmpfilename == '-') or self.params.get('hls_use_mpegts')
            if use_mpegts is None:
                use_mpegts = info_dict.get('is_live')
            if use_mpegts:
                args += ['-f', 'mpegts']
            else:
                args += ['-f', 'mp4']
                if (ffpp.basename == 'ffmpeg' and ffpp._features.get('needs_adtstoasc')) and (not info_dict.get('acodec') or info_dict['acodec'].split('.')[0] in ('aac', 'mp4a')):
                    args += ['-bsf:a', 'aac_adtstoasc']
        elif protocol == 'rtmp':
            args += ['-f', 'flv']
        elif ext == 'mp4' and tmpfilename == '-':
            # mp4 cannot be streamed to stdout (non-seekable); use mpegts
            args += ['-f', 'mpegts']
        elif ext == 'unknown_video':
            ext = determine_ext(remove_end(tmpfilename, '.part'))
            if ext == 'unknown_video':
                self.report_warning(
                    'The video format is unknown and cannot be downloaded by ffmpeg. '
                    'Explicitly set the extension in the filename to attempt download in that format')
            else:
                self.report_warning(f'The video format is unknown. Trying to download as {ext} according to the filename')
                args += ['-f', EXT_TO_OUT_FORMATS.get(ext, ext)]
        else:
            args += ['-f', EXT_TO_OUT_FORMATS.get(ext, ext)]

        args += traverse_obj(info_dict, ('downloader_options', 'ffmpeg_args_out', ...))

        args += self._configuration_args(('_o1', '_o', ''))

        args = [encodeArgument(opt) for opt in args]
        args.append(ffpp._ffmpeg_filename_argument(tmpfilename))
        self._debug_cmd(args)

        piped = any(fmt['url'] in ('-', 'pipe:') for fmt in selected_formats)
        with Popen(args, stdin=subprocess.PIPE, env=env) as proc:
            if piped:
                self.on_process_started(proc, proc.stdin)
            try:
                retval = proc.wait()
            except BaseException as e:
                # subprocces.run would send the SIGKILL signal to ffmpeg and the
                # mp4 file couldn't be played, but if we ask ffmpeg to quit it
                # produces a file that is playable (this is mostly useful for live
                # streams). Note that Windows is not affected and produces playable
                # files (see https://github.com/ytdl-org/youtube-dl/issues/8300).
                if isinstance(e, KeyboardInterrupt) and sys.platform != 'win32' and not piped:
                    proc.communicate_or_kill(b'q')
                else:
                    proc.kill(timeout=None)
                raise
            return retval
|
2016-02-19 18:29:24 +00:00
|
|
|
|
|
|
|
|
|
|
|
class AVconvFD(FFmpegFD):
    # Identical behavior to FFmpegFD; only the name reported by
    # get_basename() (derived from the class name) differs: 'avconv'
    pass
|
|
|
|
|
2016-11-17 11:42:56 +00:00
|
|
|
|
2022-03-25 07:08:33 +00:00
|
|
|
# Registry of all concrete downloader classes in this module, keyed by
# basename (e.g. 'curl', 'aria2c'); the abstract bases are excluded
_BY_NAME = {
    klass.get_basename(): klass
    for name, klass in globals().items()
    if name.endswith('FD') and name not in ('ExternalFD', 'FragmentFD')
}
|
|
|
|
|
2015-01-24 00:38:48 +00:00
|
|
|
|
|
|
|
def list_external_downloaders():
    """Return the names of all supported external downloaders, sorted alphabetically."""
    return sorted(_BY_NAME)
|
|
|
|
|
|
|
|
|
|
|
|
def get_external_downloader(external_downloader):
    """ Given the name of the executable, see whether we support the given downloader """
    basename = os.path.splitext(os.path.basename(external_downloader))[0]
    # Exact basename match first (e.g. 'aria2c' -> Aria2cFD)
    if basename in _BY_NAME:
        return _BY_NAME[basename]
    # Otherwise accept any downloader whose executable name is a substring
    # of the given basename (e.g. versioned binaries)
    for klass in _BY_NAME.values():
        if klass.EXE_NAME in basename:
            return klass
    return None
|