Support loading info.json with a list at its root

pukkandan 2023-03-10 14:12:08 +05:30
parent 871c907454
commit ab1de9cb1e
No known key found for this signature in database
GPG key ID: 7EEE9E1E817D0A39
2 changed files with 14 additions and 11 deletions
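
For context, "a list at its root" refers to the top-level JSON value of the .info.json file: in addition to the single info dict accepted before, the root may now be an array of info dicts, each of which is processed in turn. A minimal illustration of the two accepted shapes (the IDs and URLs below are made-up placeholders, not taken from this commit):

import json

# Previously the only accepted shape: a single info dict at the root of the file.
single = {'id': 'abc123', 'webpage_url': 'https://example.com/watch?v=abc123'}

# Newly accepted shape: a list of info dicts at the root.
batch = [
    {'id': 'abc123', 'webpage_url': 'https://example.com/watch?v=abc123'},
    {'id': 'def456', 'webpage_url': 'https://example.com/watch?v=def456'},
]

with open('batch.info.json', 'w', encoding='utf-8') as f:
    json.dump(batch, f)  # the root of the written file is a JSON array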

yt_dlp/YoutubeDL.py

@@ -3376,18 +3376,19 @@ def download_with_info_file(self, info_filename):
                 [info_filename], mode='r',
                 openhook=fileinput.hook_encoded('utf-8'))) as f:
             # FileInput doesn't have a read method, we can't call json.load
-            info = self.sanitize_info(json.loads('\n'.join(f)), self.params.get('clean_infojson', True))
-        try:
-            self.__download_wrapper(self.process_ie_result)(info, download=True)
-        except (DownloadError, EntryNotInPlaylist, ReExtractInfo) as e:
-            if not isinstance(e, EntryNotInPlaylist):
-                self.to_stderr('\r')
-            webpage_url = info.get('webpage_url')
-            if webpage_url is not None:
+            infos = [self.sanitize_info(info, self.params.get('clean_infojson', True))
+                     for info in variadic(json.loads('\n'.join(f)))]
+        for info in infos:
+            try:
+                self.__download_wrapper(self.process_ie_result)(info, download=True)
+            except (DownloadError, EntryNotInPlaylist, ReExtractInfo) as e:
+                if not isinstance(e, EntryNotInPlaylist):
+                    self.to_stderr('\r')
+                webpage_url = info.get('webpage_url')
+                if webpage_url is None:
+                    raise
                 self.report_warning(f'The info failed to download: {e}; trying with URL {webpage_url}')
-                return self.download([webpage_url])
-            else:
-                raise
+                self.download([webpage_url])
         return self._download_retcode
 
     @staticmethod
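
What makes both shapes work is variadic() from yt_dlp.utils, which returns a list unchanged but wraps a lone dict into a one-element sequence, so the new for-loop can always iterate. A rough sketch of that behaviour, not the library's exact implementation:

from collections.abc import Iterable

def variadic_sketch(x, allowed_types=(str, bytes, dict)):
    # Pass x through if it is already an iterable of items (e.g. a list of info
    # dicts); otherwise wrap it in a 1-tuple so the caller can always iterate.
    return x if isinstance(x, Iterable) and not isinstance(x, allowed_types) else (x,)

assert variadic_sketch({'id': 'a'}) == ({'id': 'a'},)                              # single info dict
assert variadic_sketch([{'id': 'a'}, {'id': 'b'}]) == [{'id': 'a'}, {'id': 'b'}]   # list at the root

Note the error handling also changes shape: a failed entry that has a webpage_url now falls back to re-downloading that URL and then continues with the remaining entries, instead of returning early from the method.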

yt_dlp/__init__.py

@@ -952,6 +952,8 @@ def _real_main(argv=None):
         parser.destroy()
         try:
             if opts.load_info_filename is not None:
+                if all_urls:
+                    ydl.report_warning('URLs are ignored due to --load-info-json')
                 return ydl.download_with_info_file(expand_path(opts.load_info_filename))
             else:
                 return ydl.download(all_urls)
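
End to end, a hedged sketch of replaying such a file through the Python API (the path and options below are illustrative; the equivalent CLI is yt-dlp --load-info-json batch.info.json, which now warns that any extra URLs on the command line are ignored):

from yt_dlp import YoutubeDL
from yt_dlp.utils import expand_path

# 'skip_download' is only set so the sketch exercises the code path without
# fetching any media; the file name is hypothetical.
with YoutubeDL({'skip_download': True}) as ydl:
    retcode = ydl.download_with_info_file(expand_path('~/batch.info.json'))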