[downloader/ffmpeg] Specify headers for each URL (#3553)

Closes #2696
Authored by: elyse0
Elyse 2022-04-26 04:54:56 -05:00 committed by GitHub
parent 7ab56be2c7
commit 00828e2c93

yt_dlp/downloader/external.py

@@ -382,13 +382,15 @@ def _call_downloader(self, tmpfilename, info_dict):
         # if end_time:
         #     args += ['-t', compat_str(end_time - start_time)]
 
-        if info_dict.get('http_headers') is not None and re.match(r'^https?://', urls[0]):
-            # Trailing \r\n after each HTTP header is important to prevent warning from ffmpeg/avconv:
-            # [http @ 00000000003d2fa0] No trailing CRLF found in HTTP header.
-            headers = handle_youtubedl_headers(info_dict['http_headers'])
-            args += [
+        http_headers = None
+        if info_dict.get('http_headers'):
+            youtubedl_headers = handle_youtubedl_headers(info_dict['http_headers'])
+            http_headers = [
+                # Trailing \r\n after each HTTP header is important to prevent warning from ffmpeg/avconv:
+                # [http @ 00000000003d2fa0] No trailing CRLF found in HTTP header.
                 '-headers',
-                ''.join(f'{key}: {val}\r\n' for key, val in headers.items())]
+                ''.join(f'{key}: {val}\r\n' for key, val in youtubedl_headers.items())
+            ]
 
         env = None
         proxy = self.params.get('proxy')
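For reference, a minimal sketch (not part of the patch) of how the value passed to ffmpeg's '-headers' option is assembled. The header names and values below are invented for illustration; in the real code they come from handle_youtubedl_headers(info_dict['http_headers']).

# Illustrative only: build the single string ffmpeg expects after '-headers'.
example_headers = {
    'User-Agent': 'Mozilla/5.0',   # made-up value
    'Cookie': 'session=abc123',    # made-up value
}
# Each header line must end in '\r\n', otherwise ffmpeg/avconv warns:
# "No trailing CRLF found in HTTP header."
headers_arg = ''.join(f'{key}: {val}\r\n' for key, val in example_headers.items())
http_headers = ['-headers', headers_arg]
# http_headers == ['-headers', 'User-Agent: Mozilla/5.0\r\nCookie: session=abc123\r\n']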
@@ -441,6 +443,11 @@ def _call_downloader(self, tmpfilename, info_dict):
                 args += ['-rtmp_conn', conn]
 
         for i, url in enumerate(urls):
+            # We need to specify headers for each http input stream
+            # otherwise, it will only be applied to the first.
+            # https://github.com/yt-dlp/yt-dlp/issues/2696
+            if http_headers is not None and re.match(r'^https?://', url):
+                args += http_headers
             args += self._configuration_args((f'_i{i + 1}', '_i')) + ['-i', url]
 
         args += ['-c', 'copy']
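A rough sketch of why the loop matters, assuming two illustrative HTTP input URLs and an http_headers list like the one built above (both are placeholders, not values from the commit). The '-headers' option is repeated before every '-i', so ffmpeg applies the headers to each input rather than only the first, which is the behaviour reported in #2696.

import re

urls = ['https://example.com/video.m3u8', 'https://example.com/audio.m3u8']  # made-up URLs
http_headers = ['-headers', 'User-Agent: Mozilla/5.0\r\n']                   # made-up headers

args = []
for url in urls:
    # Place the headers immediately before each HTTP input, as the patch does.
    if http_headers is not None and re.match(r'^https?://', url):
        args += http_headers
    args += ['-i', url]

# Resulting (simplified) argument list:
# ['-headers', 'User-Agent: Mozilla/5.0\r\n', '-i', 'https://example.com/video.m3u8',
#  '-headers', 'User-Agent: Mozilla/5.0\r\n', '-i', 'https://example.com/audio.m3u8']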