[cleanup] misc
diff --git a/yt_dlp/downloader/fragment.py b/yt_dlp/downloader/fragment.py
index a0c1d13ac2a3149cac7eac2bd907e89c3bd9da12..a9d1471f8c8f364ed71cd6b01a8bc311edec0372 100644
--- a/yt_dlp/downloader/fragment.py
+++ b/yt_dlp/downloader/fragment.py
@@ -3,10 +3,23 @@
 import os
 import time
 import json
+from math import ceil
+
+try:
+    import concurrent.futures
+    can_threaded_download = True
+except ImportError:
+    can_threaded_download = False
 
 from .common import FileDownloader
 from .http import HttpFD
+from ..aes import aes_cbc_decrypt_bytes
+from ..compat import (
+    compat_urllib_error,
+    compat_struct_pack,
+)
 from ..utils import (
+    DownloadError,
     error_to_compat_str,
     encodeFilename,
     sanitize_open,
@@ -31,6 +44,7 @@ class FragmentFD(FileDownloader):
                         Skip unavailable fragments (DASH and hlsnative only)
     keep_fragments:     Keep downloaded fragments on disk after downloading is
                         finished
+    _no_ytdl_file:      Don't use .ytdl file
 
     For each incomplete fragment download yt-dlp keeps on disk a special
     bookkeeping file with download state and metadata (in future such files will
@@ -55,29 +69,32 @@ class FragmentFD(FileDownloader):
 
     def report_retry_fragment(self, err, frag_index, count, retries):
         self.to_screen(
-            '[download] Got server HTTP error: %s. Retrying fragment %d (attempt %d of %s) ...'
+            '\r[download] Got server HTTP error: %s. Retrying fragment %d (attempt %d of %s) ...'
             % (error_to_compat_str(err), frag_index, count, self.format_retries(retries)))
 
-    def report_skip_fragment(self, frag_index):
-        self.to_screen('[download] Skipping fragment %d ...' % frag_index)
+    def report_skip_fragment(self, frag_index, err=None):
+        err = f' {err};' if err else ''
+        self.to_screen(f'[download]{err} Skipping fragment {frag_index:d} ...')
 
     def _prepare_url(self, info_dict, url):
         headers = info_dict.get('http_headers')
         return sanitized_Request(url, None, headers) if headers else url
 
-    def _prepare_and_start_frag_download(self, ctx):
+    def _prepare_and_start_frag_download(self, ctx, info_dict):
         self._prepare_frag_download(ctx)
-        self._start_frag_download(ctx)
+        self._start_frag_download(ctx, info_dict)
 
-    @staticmethod
-    def __do_ytdl_file(ctx):
-        return not ctx['live'] and not ctx['tmpfilename'] == '-'
+    def __do_ytdl_file(self, ctx):
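+        # No resume bookkeeping for live streams or when writing to stdout ('-')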
+        return not ctx['live'] and not ctx['tmpfilename'] == '-' and not self.params.get('_no_ytdl_file')
 
     def _read_ytdl_file(self, ctx):
         assert 'ytdl_corrupt' not in ctx
         stream, _ = sanitize_open(self.ytdl_filename(ctx['filename']), 'r')
         try:
-            ctx['fragment_index'] = json.loads(stream.read())['downloader']['current_fragment']['index']
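+            # The .ytdl file stores JSON of the form (see _write_ytdl_file below):
+            # {"downloader": {"current_fragment": {"index": N}, "extra_state": {...}, "fragment_count": N}}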
+            ytdl_data = json.loads(stream.read())
+            ctx['fragment_index'] = ytdl_data['downloader']['current_fragment']['index']
+            if 'extra_state' in ytdl_data['downloader']:
+                ctx['extra_state'] = ytdl_data['downloader']['extra_state']
         except Exception:
             ctx['ytdl_corrupt'] = True
         finally:
@@ -85,15 +102,19 @@ def _read_ytdl_file(self, ctx):
 
     def _write_ytdl_file(self, ctx):
         frag_index_stream, _ = sanitize_open(self.ytdl_filename(ctx['filename']), 'w')
-        downloader = {
-            'current_fragment': {
-                'index': ctx['fragment_index'],
-            },
-        }
-        if ctx.get('fragment_count') is not None:
-            downloader['fragment_count'] = ctx['fragment_count']
-        frag_index_stream.write(json.dumps({'downloader': downloader}))
-        frag_index_stream.close()
+        try:
+            downloader = {
+                'current_fragment': {
+                    'index': ctx['fragment_index'],
+                },
+            }
+            if 'extra_state' in ctx:
+                downloader['extra_state'] = ctx['extra_state']
+            if ctx.get('fragment_count') is not None:
+                downloader['fragment_count'] = ctx['fragment_count']
+            frag_index_stream.write(json.dumps({'downloader': downloader}))
+        finally:
+            frag_index_stream.close()
 
     def _download_fragment(self, ctx, frag_url, info_dict, headers=None, request_data=None):
         fragment_filename = '%s-Frag%d' % (ctx['tmpfilename'], ctx['fragment_index'])
@@ -101,17 +122,22 @@ def _download_fragment(self, ctx, frag_url, info_dict, headers=None, request_dat
             'url': frag_url,
             'http_headers': headers or info_dict.get('http_headers'),
             'request_data': request_data,
+            'ctx_id': ctx.get('ctx_id'),
         }
         success = ctx['dl'].download(fragment_filename, fragment_info_dict)
         if not success:
             return False, None
         if fragment_info_dict.get('filetime'):
             ctx['fragment_filetime'] = fragment_info_dict.get('filetime')
-        down, frag_sanitized = sanitize_open(fragment_filename, 'rb')
+        ctx['fragment_filename_sanitized'] = fragment_filename
+        return True, self._read_fragment(ctx)
+
+    def _read_fragment(self, ctx):
+        down, frag_sanitized = sanitize_open(ctx['fragment_filename_sanitized'], 'rb')
         ctx['fragment_filename_sanitized'] = frag_sanitized
         frag_content = down.read()
         down.close()
-        return True, frag_content
+        return frag_content
 
     def _append_fragment(self, ctx, frag_content):
         try:
@@ -193,9 +219,10 @@ def _prepare_frag_download(self, ctx):
             'complete_frags_downloaded_bytes': resume_len,
         })
 
-    def _start_frag_download(self, ctx):
+    def _start_frag_download(self, ctx, info_dict):
         resume_len = ctx['complete_frags_downloaded_bytes']
         total_frags = ctx['total_frags']
+        ctx_id = ctx.get('ctx_id')
         # This dict stores the download progress, it's updated by the progress
         # hook
         state = {
@@ -219,9 +246,16 @@ def frag_progress_hook(s):
             if s['status'] not in ('downloading', 'finished'):
                 return
 
+            # When several downloads run in parallel, ignore progress events
+            # coming from a different download context
+            if ctx_id is not None and s.get('ctx_id') != ctx_id:
+                return
+
+            state['max_progress'] = ctx.get('max_progress')
+            state['progress_idx'] = ctx.get('progress_idx')
+
             time_now = time.time()
             state['elapsed'] = time_now - start
             frag_total_bytes = s.get('total_bytes') or 0
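+            # Move the fragment's own info_dict aside; the hook reports the overall info_dict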
+            s['fragment_info_dict'] = s.pop('info_dict', {})
             if not ctx['live']:
                 estimated_size = (
                     (ctx['complete_frags_downloaded_bytes'] + frag_total_bytes)
@@ -244,13 +278,13 @@ def frag_progress_hook(s):
                 state['speed'] = s.get('speed') or ctx.get('speed')
                 ctx['speed'] = state['speed']
                 ctx['prev_frag_downloaded_bytes'] = frag_downloaded_bytes
-            self._hook_progress(state)
+            self._hook_progress(state, info_dict)
 
         ctx['dl'].add_progress_hook(frag_progress_hook)
 
         return start
 
-    def _finish_frag_download(self, ctx):
+    def _finish_frag_download(self, ctx, info_dict):
         ctx['dest_stream'].close()
         if self.__do_ytdl_file(ctx):
             ytdl_filename = encodeFilename(self.ytdl_filename(ctx['filename']))
@@ -277,7 +311,10 @@ def _finish_frag_download(self, ctx):
             'filename': ctx['filename'],
             'status': 'finished',
             'elapsed': elapsed,
-        })
+            'ctx_id': ctx.get('ctx_id'),
+            'max_progress': ctx.get('max_progress'),
+            'progress_idx': ctx.get('progress_idx'),
+        }, info_dict)
 
     def _prepare_external_frag_download(self, ctx):
         if 'live' not in ctx:
@@ -299,3 +336,152 @@ def _prepare_external_frag_download(self, ctx):
             'tmpfilename': tmpfilename,
             'fragment_index': 0,
         })
+
+    def decrypter(self, info_dict):
+        _key_cache = {}
+
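+        # Cache key responses so each distinct key URL is fetched only once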
+        def _get_key(url):
+            if url not in _key_cache:
+                _key_cache[url] = self.ydl.urlopen(self._prepare_url(info_dict, url)).read()
+            return _key_cache[url]
+
+        def decrypt_fragment(fragment, frag_content):
+            decrypt_info = fragment.get('decrypt_info')
+            if not decrypt_info or decrypt_info['METHOD'] != 'AES-128':
+                return frag_content
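+            # Per the HLS spec, a missing IV defaults to the big-endian 16-byte media sequence number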
+            iv = decrypt_info.get('IV') or compat_struct_pack('>8xq', fragment['media_sequence'])
+            decrypt_info['KEY'] = decrypt_info.get('KEY') or _get_key(info_dict.get('_decryption_key_url') or decrypt_info['URI'])
+            # Don't decrypt the content in tests, since the data is explicitly truncated and so
+            # is not a valid block size (see https://github.com/ytdl-org/youtube-dl/pull/27660).
+            # Tests only care that the correct data was downloaded, not what it decrypts to.
+            if self.params.get('test', False):
+                return frag_content
+            decrypted_data = aes_cbc_decrypt_bytes(frag_content, decrypt_info['KEY'], iv)
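+            # AES-128 HLS content is PKCS#7-padded: the last byte gives the number of padding bytes to strip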
+            return decrypted_data[:-decrypted_data[-1]]
+
+        return decrypt_fragment
+
+    def download_and_append_fragments_multiple(self, *args, pack_func=None, finish_func=None):
+        '''
+        @params (ctx1, fragments1, info_dict1), (ctx2, fragments2, info_dict2), ...
+                each arg must be a tuple or list of (ctx, fragments, info_dict)
+        '''
+        max_progress = len(args)
+        if max_progress == 1:
+            return self.download_and_append_fragments(*args[0], pack_func=pack_func, finish_func=finish_func)
+        max_workers = self.params.get('concurrent_fragment_downloads', max_progress)
+        if max_progress > 1:
+            self._prepare_multiline_status(max_progress)
+
+        def thread_func(idx, ctx, fragments, info_dict, tpe):
+            ctx['max_progress'] = max_progress
+            ctx['progress_idx'] = idx
+            return self.download_and_append_fragments(ctx, fragments, info_dict, pack_func=pack_func, finish_func=finish_func, tpe=tpe)
+
+        class FTPE(concurrent.futures.ThreadPoolExecutor):
+        # Suppress shutdown on __exit__; the default would wait on a worker thread that
+        # is itself still using this executor, deadlocking
+            def __exit__(self, exc_type, exc_val, exc_tb):
+                pass
+
+        spins = []
+        for idx, (ctx, fragments, info_dict) in enumerate(args):
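+            # Split the worker budget roughly evenly across the parallel downloads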
+            tpe = FTPE(ceil(max_workers / max_progress))
+            job = tpe.submit(thread_func, idx, ctx, fragments, info_dict, tpe)
+            spins.append((tpe, job))
+
+        result = True
+        for tpe, job in spins:
+            try:
+                result = result and job.result()
+            finally:
+                tpe.shutdown(wait=True)
+        return result
+
+    def download_and_append_fragments(self, ctx, fragments, info_dict, *, pack_func=None, finish_func=None, tpe=None):
+        fragment_retries = self.params.get('fragment_retries', 0)
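+        # If unavailable fragments may be skipped, only the first fragment (index 0) is fatal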
+        is_fatal = (lambda idx: idx == 0) if self.params.get('skip_unavailable_fragments', True) else (lambda _: True)
+        if not pack_func:
+            pack_func = lambda frag_content, _: frag_content
+
+        def download_fragment(fragment, ctx):
+            frag_index = ctx['fragment_index'] = fragment['frag_index']
+            headers = info_dict.get('http_headers', {}).copy()
+            byte_range = fragment.get('byte_range')
+            if byte_range:
+                headers['Range'] = 'bytes=%d-%d' % (byte_range['start'], byte_range['end'] - 1)
+
+            # Never skip the first fragment
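+            # (frag_index is 1-based; fragments may instead carry an explicit 0-based 'index')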
+            fatal = is_fatal(fragment.get('index') or (frag_index - 1))
+            count, frag_content = 0, None
+            while count <= fragment_retries:
+                try:
+                    success, frag_content = self._download_fragment(ctx, fragment['url'], info_dict, headers)
+                    if not success:
+                        return False, frag_index
+                    break
+                except compat_urllib_error.HTTPError as err:
+                    # Unavailable (possibly temporary) fragments may be served.
+                    # Retry first, then either skip or abort.
+                    # See https://github.com/ytdl-org/youtube-dl/issues/10165 and
+                    # https://github.com/ytdl-org/youtube-dl/issues/10448.
+                    count += 1
+                    if count <= fragment_retries:
+                        self.report_retry_fragment(err, frag_index, count, fragment_retries)
+                except DownloadError:
+                    # Don't retry the fragment if the error occurred during the HTTP download
+                    # itself, since that has its own retry settings
+                    if not fatal:
+                        break
+                    raise
+
+            if count > fragment_retries:
+                if not fatal:
+                    return False, frag_index
+                ctx['dest_stream'].close()
+                self.report_error('Giving up after %s fragment retries' % fragment_retries)
+                return False, frag_index
+            return frag_content, frag_index
+
+        def append_fragment(frag_content, frag_index, ctx):
+            if not frag_content:
+                if not is_fatal(frag_index - 1):
+                    self.report_skip_fragment(frag_index, 'fragment not found')
+                    return True
+                else:
+                    ctx['dest_stream'].close()
+                    self.report_error(
+                        'fragment %s not found, unable to continue' % frag_index)
+                    return False
+            self._append_fragment(ctx, pack_func(frag_content, frag_index))
+            return True
+
+        decrypt_fragment = self.decrypter(info_dict)
+
+        max_workers = self.params.get('concurrent_fragment_downloads', 1)
+        if can_threaded_download and max_workers > 1:
+
+            def _download_fragment(fragment):
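+                # Give each worker its own shallow copy of ctx so parallel downloads
+                # don't clobber shared per-fragment state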
+                ctx_copy = ctx.copy()
+                frag_content, frag_index = download_fragment(fragment, ctx_copy)
+                return fragment, frag_content, frag_index, ctx_copy.get('fragment_filename_sanitized')
+
+            self.report_warning('The download speed shown is only that of one thread. This is a known issue; patches are welcome')
+            with tpe or concurrent.futures.ThreadPoolExecutor(max_workers) as pool:
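+                # pool.map yields results in submission order, so fragments are still appended sequentially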
+                for fragment, frag_content, frag_index, frag_filename in pool.map(_download_fragment, fragments):
+                    ctx['fragment_filename_sanitized'] = frag_filename
+                    ctx['fragment_index'] = frag_index
+                    result = append_fragment(decrypt_fragment(fragment, frag_content), frag_index, ctx)
+                    if not result:
+                        return False
+        else:
+            for fragment in fragments:
+                frag_content, frag_index = download_fragment(fragment, ctx)
+                result = append_fragment(decrypt_fragment(fragment, frag_content), frag_index, ctx)
+                if not result:
+                    return False
+
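+        # finish_func may supply trailing bytes to append once all fragments are written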
+        if finish_func is not None:
+            ctx['dest_stream'].write(finish_func())
+            ctx['dest_stream'].flush()
+        self._finish_frag_download(ctx, info_dict)
+        return True