import time
import urllib.parse

from . import get_suitable_downloader
from .fragment import FragmentFD
from ..utils import update_url_query, urljoin
class DashSegmentsFD(FragmentFD):
    """
    Download segments in a DASH manifest. External downloaders can take over
    the fragment downloads by supporting the 'dash_frag_urls' protocol
    """

    FD_NAME = 'dashsegments'

    def real_download(self, filename, info_dict):
        """Download every requested DASH format into `filename`.

        For each format in `info_dict['requested_formats']` (or `info_dict`
        itself when there are none), a fragment-download context is prepared
        and the fragment list resolved.  The actual downloading is then either
        delegated to an external downloader that supports the
        'dash_frag_urls' protocol, or performed in-process via
        `download_and_append_fragments_multiple`.
        """
        if 'http_dash_segments_generator' in info_dict['protocol'].split('+'):
            real_downloader = None  # No external FD can support --live-from-start
        else:
            if info_dict.get('is_live'):
                self.report_error('Live DASH videos are not supported')
            real_downloader = get_suitable_downloader(
                info_dict, self.params, None, protocol='dash_frag_urls', to_stdout=(filename == '-'))

        # One shared wall-clock start for all formats, so per-format contexts
        # report against the same baseline.
        real_start = time.time()

        # Merge each per-format dict over the top-level info_dict so every
        # format carries the full metadata.
        requested_formats = [{**info_dict, **fmt} for fmt in info_dict.get('requested_formats', [])]
        args = []
        for fmt in requested_formats or [info_dict]:
            try:
                # With --test only the first fragment is fetched.
                fragment_count = 1 if self.params.get('test') else len(fmt['fragments'])
            except TypeError:
                # 'fragments' has no len() — presumably a callable/generator
                # used for live/from-start streams (see _resolve_fragments).
                fragment_count = None
            ctx = {
                'filename': fmt.get('filepath') or filename,
                # 'is_from_start' (string) flags --live-from-start; otherwise
                # fall back to the plain is_live truthiness.
                'live': 'is_from_start' if fmt.get('is_from_start') else fmt.get('is_live'),
                'total_frags': fragment_count,
            }

            if real_downloader:
                self._prepare_external_frag_download(ctx)
            else:
                self._prepare_and_start_frag_download(ctx, fmt)
            ctx['start'] = real_start

            # Extra query parameters to append to every fragment URL
            # (e.g. auth tokens supplied by the extractor).
            extra_query = None
            extra_param_to_segment_url = info_dict.get('extra_param_to_segment_url')
            if extra_param_to_segment_url:
                extra_query = urllib.parse.parse_qs(extra_param_to_segment_url)

            fragments_to_download = self._get_fragments(fmt, ctx, extra_query)

            if real_downloader:
                self.to_screen(
                    f'[{self.FD_NAME}] Fragment downloads will be delegated to {real_downloader.get_basename()}')
                # NOTE(review): this materializes and hands off only the
                # current format's fragments, then returns — apparently the
                # external-downloader path handles a single merged info_dict.
                info_dict['fragments'] = list(fragments_to_download)
                fd = real_downloader(self.ydl, self.params)
                return fd.real_download(filename, info_dict)

            args.append([ctx, fragments_to_download, fmt])

        # Only failure of the first format is fatal; the rest are best-effort.
        return self.download_and_append_fragments_multiple(*args, is_fatal=lambda idx: idx == 0)

    def _resolve_fragments(self, fragments, ctx):
        """Materialize `fragments`: call it with `ctx` if it is a callable
        (lazy fragment producer), and truncate to the first fragment when
        --test is active.
        """
        fragments = fragments(ctx) if callable(fragments) else fragments
        return [next(iter(fragments))] if self.params.get('test') else fragments

    def _get_fragments(self, fmt, ctx, extra_query):
        """Yield one download-task dict per fragment of `fmt`.

        Skips fragments whose 1-based index is <= ctx['fragment_index']
        (presumably the resume position set by the prepare step — confirm in
        FragmentFD).  `extra_query`, when given, is merged into each
        fragment URL.
        """
        fragment_base_url = fmt.get('fragment_base_url')
        fragments = self._resolve_fragments(fmt['fragments'], ctx)

        frag_index = 0
        for i, fragment in enumerate(fragments):
            frag_index += 1
            if frag_index <= ctx['fragment_index']:
                continue
            fragment_url = fragment.get('url')
            if not fragment_url:
                # A fragment without an absolute URL must carry a relative
                # 'path' to resolve against the format's base URL.
                assert fragment_base_url
                fragment_url = urljoin(fragment_base_url, fragment['path'])
            if extra_query:
                fragment_url = update_url_query(fragment_url, extra_query)

            yield {
                'frag_index': frag_index,   # 1-based position, for resume/progress
                'fragment_count': fragment.get('fragment_count'),
                'index': i,                 # 0-based position in the manifest
                'url': fragment_url,
                'byte_range': fragment.get('byte_range'),
            }