Mirror of https://github.com/yt-dlp/yt-dlp.git, synced 2024-11-17 01:25:11 +00:00

Merge pull request #8201 from remitamine/hls-aes

[downloader/hls] Add support for AES-128 encrypted segments in hlsnative downloader

Commit 6aeb64b673 (3 changed files with 68 additions and 36 deletions)
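The change teaches the native HLS downloader (hlsnative) to decrypt AES-128 segments itself when pycrypto is available, instead of rejecting such manifests. A hypothetical way to exercise it through the Python API of that era; the 'hls_prefer_native' option name and the URL are assumptions, not taken from this commit:

    import youtube_dl

    # Assumed option name: 'hls_prefer_native' routes HLS downloads through hlsnative.
    ydl_opts = {'hls_prefer_native': True}
    with youtube_dl.YoutubeDL(ydl_opts) as ydl:
        # Hypothetical URL of an AES-128 encrypted HLS playlist.
        ydl.download(['https://example.com/encrypted/index.m3u8'])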
downloader/hls.py
@@ -2,14 +2,24 @@
 
 import os.path
 import re
+import binascii
+try:
+    from Crypto.Cipher import AES
+    can_decrypt_frag = True
+except ImportError:
+    can_decrypt_frag = False
 
 from .fragment import FragmentFD
 from .external import FFmpegFD
 
-from ..compat import compat_urlparse
+from ..compat import (
+    compat_urlparse,
+    compat_struct_pack,
+)
 from ..utils import (
     encodeFilename,
     sanitize_open,
+    parse_m3u8_attributes,
 )
 
 
@@ -21,7 +31,7 @@ class HlsFD(FragmentFD):
     @staticmethod
     def can_download(manifest):
         UNSUPPORTED_FEATURES = (
-            r'#EXT-X-KEY:METHOD=(?!NONE)',  # encrypted streams [1]
+            r'#EXT-X-KEY:METHOD=(?!NONE|AES-128)',  # encrypted streams [1]
             r'#EXT-X-BYTERANGE',  # playlists composed of byte ranges of media files [2]
 
             # Live streams heuristic does not always work (e.g. geo restricted to Germany
@@ -39,7 +49,9 @@ def can_download(manifest):
             # 3. https://tools.ietf.org/html/draft-pantos-http-live-streaming-17#section-4.3.3.2
             # 4. https://tools.ietf.org/html/draft-pantos-http-live-streaming-17#section-4.3.3.5
         )
-        return all(not re.search(feature, manifest) for feature in UNSUPPORTED_FEATURES)
+        check_results = [not re.search(feature, manifest) for feature in UNSUPPORTED_FEATURES]
+        check_results.append(not (re.search(r'#EXT-X-KEY:METHOD=AES-128', manifest) and not can_decrypt_frag))
+        return all(check_results)
 
     def real_download(self, filename, info_dict):
         man_url = info_dict['url']
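With the relaxed regex, an AES-128 manifest is no longer an unsupported feature by itself; the appended check only rejects it when pycrypto failed to import (in which case real_download still delegates to the ffmpeg downloader, as the fd.real_download context in the next hunk shows). A minimal sketch with a made-up manifest, assuming it contains no other unsupported feature:

    from youtube_dl.downloader.hls import HlsFD, can_decrypt_frag

    # Made-up manifest, for illustration only.
    manifest = (
        '#EXTM3U\n'
        '#EXT-X-KEY:METHOD=AES-128,URI="https://example.com/key.bin"\n'
        '#EXTINF:10,\n'
        'segment0.ts\n'
        '#EXT-X-ENDLIST\n'
    )
    # r'#EXT-X-KEY:METHOD=(?!NONE|AES-128)' no longer matches, so the only remaining
    # gate is the pycrypto check appended to check_results.
    assert HlsFD.can_download(manifest) == can_decrypt_frag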
@@ -57,36 +69,58 @@ def real_download(self, filename, info_dict):
                 fd.add_progress_hook(ph)
             return fd.real_download(filename, info_dict)
 
-        fragment_urls = []
+        total_frags = 0
         for line in s.splitlines():
             line = line.strip()
             if line and not line.startswith('#'):
-                segment_url = (
-                    line
-                    if re.match(r'^https?://', line)
-                    else compat_urlparse.urljoin(man_url, line))
-                fragment_urls.append(segment_url)
-                # We only download the first fragment during the test
-                if self.params.get('test', False):
-                    break
+                total_frags += 1
 
         ctx = {
             'filename': filename,
-            'total_frags': len(fragment_urls),
+            'total_frags': total_frags,
         }
 
         self._prepare_and_start_frag_download(ctx)
 
+        i = 0
+        media_sequence = 0
+        decrypt_info = {'METHOD': 'NONE'}
         frags_filenames = []
-        for i, frag_url in enumerate(fragment_urls):
+        for line in s.splitlines():
+            line = line.strip()
+            if line:
+                if not line.startswith('#'):
+                    frag_url = (
+                        line
+                        if re.match(r'^https?://', line)
+                        else compat_urlparse.urljoin(man_url, line))
                     frag_filename = '%s-Frag%d' % (ctx['tmpfilename'], i)
                     success = ctx['dl'].download(frag_filename, {'url': frag_url})
                     if not success:
                         return False
                     down, frag_sanitized = sanitize_open(frag_filename, 'rb')
-            ctx['dest_stream'].write(down.read())
+                    frag_content = down.read()
                     down.close()
+                    if decrypt_info['METHOD'] == 'AES-128':
+                        iv = decrypt_info.get('IV') or compat_struct_pack(">8xq", media_sequence)
+                        frag_content = AES.new(decrypt_info['KEY'], AES.MODE_CBC, iv).decrypt(frag_content)
+                    ctx['dest_stream'].write(frag_content)
                     frags_filenames.append(frag_sanitized)
+                    # We only download the first fragment during the test
+                    if self.params.get('test', False):
+                        break
+                    i += 1
+                    media_sequence += 1
+                elif line.startswith('#EXT-X-KEY'):
+                    decrypt_info = parse_m3u8_attributes(line[11:])
+                    if decrypt_info['METHOD'] == 'AES-128':
+                        if 'IV' in decrypt_info:
+                            decrypt_info['IV'] = binascii.unhexlify(decrypt_info['IV'][2:])
+                        if not re.match(r'^https?://', decrypt_info['URI']):
+                            decrypt_info['URI'] = compat_urlparse.urljoin(man_url, decrypt_info['URI'])
+                        decrypt_info['KEY'] = self.ydl.urlopen(decrypt_info['URI']).read()
+                elif line.startswith('#EXT-X-MEDIA-SEQUENCE'):
+                    media_sequence = int(line[22:])
 
         self._finish_frag_download(ctx)
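When an EXT-X-KEY tag carries no IV attribute, the HLS draft derives the IV from the media sequence number: 16 bytes, big-endian, with the sequence number in the low 8 bytes, which is what compat_struct_pack(">8xq", media_sequence) produces. A standalone sketch of that derivation and of the per-segment CBC decryption, using plain struct.pack in place of the compat wrapper; the key and payload below are invented for illustration:

    import struct
    from Crypto.Cipher import AES  # pycrypto, the optional dependency introduced above

    def default_iv(media_sequence):
        # ">8xq" = big-endian, 8 zero pad bytes, then a signed 64-bit integer:
        # the 16-byte IV prescribed when the key tag gives no IV attribute.
        return struct.pack('>8xq', media_sequence)

    assert default_iv(7) == b'\x00' * 15 + b'\x07'

    # Hypothetical 16-byte key and a block-aligned payload, for illustration only.
    key = b'0123456789abcdef'
    ciphertext = AES.new(key, AES.MODE_CBC, default_iv(7)).encrypt(b'segment payload.')
    plaintext = AES.new(key, AES.MODE_CBC, default_iv(7)).decrypt(ciphertext)
    assert plaintext == b'segment payload.'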
extractor/common.py
@@ -53,6 +53,7 @@
     mimetype2ext,
     update_Request,
     update_url_query,
+    parse_m3u8_attributes,
 )
 
 
@@ -1150,23 +1151,11 @@ def _extract_m3u8_formats(self, m3u8_url, video_id, ext=None,
         }]
         last_info = None
         last_media = None
-        kv_rex = re.compile(
-            r'(?P<key>[a-zA-Z_-]+)=(?P<val>"[^"]+"|[^",]+)(?:,|$)')
         for line in m3u8_doc.splitlines():
             if line.startswith('#EXT-X-STREAM-INF:'):
-                last_info = {}
-                for m in kv_rex.finditer(line):
-                    v = m.group('val')
-                    if v.startswith('"'):
-                        v = v[1:-1]
-                    last_info[m.group('key')] = v
+                last_info = parse_m3u8_attributes(line)
             elif line.startswith('#EXT-X-MEDIA:'):
-                last_media = {}
-                for m in kv_rex.finditer(line):
-                    v = m.group('val')
-                    if v.startswith('"'):
-                        v = v[1:-1]
-                    last_media[m.group('key')] = v
+                last_media = parse_m3u8_attributes(line)
             elif line.startswith('#') or not line.strip():
                 continue
             else:
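Both call sites in _extract_m3u8_formats now delegate attribute parsing to the shared helper added in utils.py below; the ad-hoc kv_rex logic it replaces behaved the same way. An illustration of the returned mapping for an invented variant-stream tag (the regex simply skips the '#EXT-X-STREAM-INF:' prefix, so the whole line can be passed in):

    from youtube_dl.utils import parse_m3u8_attributes  # added by this commit

    # Invented tag, for illustration only.
    line = '#EXT-X-STREAM-INF:PROGRAM-ID=1,BANDWIDTH=1280000,RESOLUTION=640x360,CODECS="avc1.42001e,mp4a.40.2"'
    assert parse_m3u8_attributes(line) == {
        'PROGRAM-ID': '1',
        'BANDWIDTH': '1280000',
        'RESOLUTION': '640x360',
        'CODECS': 'avc1.42001e,mp4a.40.2',  # surrounding quotes are stripped
    }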
utils.py
@@ -2852,3 +2852,12 @@ def decode_packed_codes(code):
     return re.sub(
         r'\b(\w+)\b', lambda mobj: symbol_table[mobj.group(0)],
         obfucasted_code)
+
+
+def parse_m3u8_attributes(attrib):
+    info = {}
+    for (key, val) in re.findall(r'(?P<key>[A-Z0-9-]+)=(?P<val>"[^"]+"|[^",]+)(?:,|$)', attrib):
+        if val.startswith('"'):
+            val = val[1:-1]
+        info[key] = val
+    return info
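The hlsnative change above feeds this helper the attribute list of an #EXT-X-KEY tag (line[11:] strips the '#EXT-X-KEY:' prefix). A small sketch with an invented key line, showing how the 0x-prefixed IV attribute becomes the 16 raw bytes AES expects:

    import binascii
    from youtube_dl.utils import parse_m3u8_attributes  # added by this commit

    # Invented key line, for illustration only.
    line = '#EXT-X-KEY:METHOD=AES-128,URI="https://example.com/key.bin",IV=0x0000000000000000000000000000002a'
    decrypt_info = parse_m3u8_attributes(line[11:])
    assert decrypt_info['METHOD'] == 'AES-128'
    assert decrypt_info['URI'] == 'https://example.com/key.bin'
    # The IV attribute is a 0x-prefixed hex string; unhexlify([2:]) yields the raw IV.
    iv = binascii.unhexlify(decrypt_info['IV'][2:])
    assert len(iv) == 16 and iv[-1:] == b'*'  # 0x2a == ord('*')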