Mirror of https://github.com/yt-dlp/yt-dlp.git
[hearthisat] Add support for more high-quality download links
commit b55ee18ff3
parent e5763a7a7e
@@ -4,10 +4,15 @@
 import re
 
 from .common import InfoExtractor
-from ..compat import compat_urllib_request
+from ..compat import (
+    compat_urllib_request,
+    compat_urlparse,
+)
 from ..utils import (
+    HEADRequest,
     str_to_int,
     urlencode_postdata,
+    urlhandle_detect_ext,
 )
 
 
@@ -16,10 +21,10 @@ class HearThisAtIE(InfoExtractor):
     _PLAYLIST_URL = 'https://hearthis.at/playlist.php'
     _TEST = {
         'url': 'https://hearthis.at/moofi/dr-kreep',
-        'md5': 'd594c573227a89f4256f0b03e68c80cc',
+        'md5': 'ab6ec33c8fed6556029337c7885eb4e0',
         'info_dict': {
             'id': '150939',
-            'ext': 'mp3',
+            'ext': 'wav',
             'title': 'Moofi - Dr. Kreep',
             'thumbnail': 're:^https?://.*\.jpg$',
             'timestamp': 1421564134,
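The 'md5' and 'ext' fields above are the expectations checked by the project's download test for this extractor; the new values reflect that the higher-preference download format (a lossless WAV for this track) is now selected instead of the streaming MP3. Assuming the repository's usual test layout, a single-extractor run would look roughly like:

python test/test_download.py TestDownload.test_HearThisAt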
@@ -67,18 +72,38 @@ def _real_extract(self, url):
         timestamp = str_to_int(self._search_regex(
             r'<span[^>]+class="calctime"[^>]+data-time="(\d+)', webpage, 'timestamp', fatal=False))
 
-        track_url = self._search_regex(
-            r'<a[^>]+data-mp3="([^"]+)"', webpage, 'track URL')
-
-        formats = [{
-            'format_id': 'mp3',
-            'url': track_url,
-            'vcodec': 'none',
-        }]
+        formats = []
+        mp3_url = self._search_regex(
+            r'(?s)<a class="player-link"\s+(?:[a-zA-Z0-9_:-]+="[^"]+"\s+)*?data-mp3="([^"]+)"',
+            webpage, 'title', fatal=False)
+        if mp3_url:
+            formats.append({
+                'format_id': 'mp3',
+                'vcodec': 'none',
+                'acodec': 'mp3',
+                'url': mp3_url,
+            })
+        download_path = self._search_regex(
+            r'<a class="[^"]*download_fct[^"]*"\s+href="([^"]+)"',
+            webpage, 'download URL', default=None)
+        if download_path:
+            download_url = compat_urlparse.urljoin(url, download_path)
+            ext_req = HEADRequest(download_url)
+            ext_handle = self._request_webpage(
+                ext_req, display_id, note='Determining extension')
+            ext = urlhandle_detect_ext(ext_handle)
+            formats.append({
+                'format_id': 'download',
+                'vcodec': 'none',
+                'ext': ext,
+                'url': download_url,
+                'preference': 2,  # Usually better quality
+            })
+        self._sort_formats(formats)
 
         return {
             'id': track_id,
-            'display-id': display_id,
+            'display_id': display_id,
             'title': title,
             'formats': formats,
             'thumbnail': thumbnail,
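The reworked extraction scrapes two candidate URLs from the track page: the streaming MP3 from the player link's data-mp3 attribute and the direct download link from the download_fct anchor. A minimal, self-contained sketch of that matching step, using the same regular expressions against hypothetical markup (the HTML snippet and URLs below are invented, not real hearthis.at output):

import re
from urllib.parse import urljoin

# Invented markup standing in for the relevant parts of a track page.
webpage = '''
<a class="player-link" data-order="1"
   data-mp3="https://stream.example/150939.mp3">Play</a>
<a class="button download_fct" href="/moofi/dr-kreep/download/">Download</a>
'''

mp3_match = re.search(
    r'(?s)<a class="player-link"\s+(?:[a-zA-Z0-9_:-]+="[^"]+"\s+)*?data-mp3="([^"]+)"',
    webpage)
download_match = re.search(
    r'<a class="[^"]*download_fct[^"]*"\s+href="([^"]+)"', webpage)

print(mp3_match.group(1))  # https://stream.example/150939.mp3
# The download href can be relative, which is why the patch resolves it against
# the page URL with compat_urlparse.urljoin(); urljoin() is its Python 3 form.
print(urljoin('https://hearthis.at/moofi/dr-kreep', download_match.group(1)))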
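Because the download link does not expose the file type in its URL, the patch sends a HEAD request (HEADRequest through _request_webpage) and lets urlhandle_detect_ext() derive the extension from the response headers. A rough standalone approximation with plain urllib, using a placeholder URL:

import urllib.request

# Placeholder URL; a real hearthis.at download link is needed for this to resolve.
download_url = 'https://example.com/tracks/150939/download'

# A HEAD request transfers only the response headers, so the probe stays cheap.
req = urllib.request.Request(download_url, method='HEAD')
with urllib.request.urlopen(req) as handle:
    # "handle" plays the role of the ext_handle the extractor passes on to
    # urlhandle_detect_ext().
    print(handle.headers.get('Content-Type'))
    print(handle.headers.get('Content-Disposition'))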
@@ -1612,6 +1612,14 @@ def urlhandle_detect_ext(url_handle):
     except AttributeError:  # Python < 3
         getheader = url_handle.info().getheader
 
+    cd = getheader('Content-Disposition')
+    if cd:
+        m = re.match(r'attachment;\s*filename="(?P<filename>[^"]+)"', cd)
+        if m:
+            e = determine_ext(m.group('filename'), default_ext=None)
+            if e:
+                return e
+
     return getheader('Content-Type').split("/")[1]
 
 
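The new branch in urlhandle_detect_ext() prefers the filename carried by a Content-Disposition attachment header and falls back to the Content-Type MIME subtype only when that fails. An offline sketch of that decision with made-up header values (a plain rsplit stands in for determine_ext(), which additionally validates the suffix):

import re

def detect_ext(headers):
    cd = headers.get('Content-Disposition')
    if cd:
        m = re.match(r'attachment;\s*filename="(?P<filename>[^"]+)"', cd)
        if m and '.' in m.group('filename'):
            # determine_ext() also sanity-checks the suffix; rsplit is enough here.
            return m.group('filename').rsplit('.', 1)[1]
    return headers['Content-Type'].split('/')[1]

print(detect_ext({
    'Content-Disposition': 'attachment; filename="Moofi - Dr. Kreep.wav"',
    'Content-Type': 'application/octet-stream',
}))  # -> wav
print(detect_ext({'Content-Type': 'audio/mpeg'}))  # -> mpeg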