[dash] Show fragment count with `--live-from-start` (#3493)
diff --git a/yt_dlp/downloader/fragment.py b/yt_dlp/downloader/fragment.py
index 7b213cd5f8f403f065c43a0d7b32fc888efef308..79161b8092d32f8e09adef2c1396ecd2a5f63e50 100644
--- a/yt_dlp/downloader/fragment.py
+++ b/yt_dlp/downloader/fragment.py
@@ -1,29 +1,19 @@
-from __future__ import division, unicode_literals
-
+import concurrent.futures
+import contextlib
 import http.client
 import json
 import math
 import os
 import time
 
-try:
-    import concurrent.futures
-    can_threaded_download = True
-except ImportError:
-    can_threaded_download = False
-
 from .common import FileDownloader
 from .http import HttpFD
 from ..aes import aes_cbc_decrypt_bytes, unpad_pkcs7
-from ..compat import (
-    compat_os_name,
-    compat_urllib_error,
-    compat_struct_pack,
-)
+from ..compat import compat_os_name, compat_struct_pack, compat_urllib_error
 from ..utils import (
     DownloadError,
-    error_to_compat_str,
     encodeFilename,
+    error_to_compat_str,
     sanitized_Request,
     traverse_obj,
 )
@@ -33,9 +23,7 @@ class HttpQuietDownloader(HttpFD):
     def to_screen(self, *args, **kargs):
         pass
 
-    def report_retry(self, err, count, retries):
-        super().to_screen(
-            f'[download] Got server HTTP error: {err}. Retrying (attempt {count} of {self.format_retries(retries)}) ...')
+    console_title = to_screen
 
 
 class FragmentFD(FileDownloader):
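
The hunk above silences the helper downloader by making its output hooks no-ops and now aliases the title hook to the same no-op instead of overriding report_retry. A minimal standalone sketch of that pattern, using made-up Base/Quiet classes rather than yt-dlp's real HttpFD:

class Base:
    def to_screen(self, *args, **kwargs):
        print(*args)

    def console_title(self, message):
        print(f'TITLE: {message}')


class Quiet(Base):
    # Override the screen-output hook with a no-op ...
    def to_screen(self, *args, **kwargs):
        pass

    # ... and reuse the same no-op for the title hook, mirroring
    # the `console_title = to_screen` line in the diff.
    console_title = to_screen


Quiet().to_screen('suppressed')      # prints nothing
Quiet().console_title('suppressed')  # prints nothing
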
@@ -78,6 +66,7 @@ def report_retry_fragment(self, err, frag_index, count, retries):
         self.to_screen(
             '\r[download] Got server HTTP error: %s. Retrying fragment %d (attempt %d of %s) ...'
             % (error_to_compat_str(err), frag_index, count, self.format_retries(retries)))
+        self.sleep_retry('fragment', count)
 
     def report_skip_fragment(self, frag_index, err=None):
         err = f' {err};' if err else ''
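
The added `self.sleep_retry('fragment', count)` pauses between fragment retry attempts. As a rough, assumption-labelled illustration only (yt-dlp's actual sleep_retry lives on FileDownloader and reads its interval from the user's retry-sleep options), a helper of this general shape could look like:

import time


def sleep_retry(get_interval, retry_type, count):
    # Hypothetical helper: `get_interval` maps (retry_type, count) to
    # seconds; None or 0 means "retry immediately without sleeping".
    interval = get_interval(retry_type, count)
    if interval:
        print(f'Sleeping {interval:.2f} seconds ...')
        time.sleep(interval)


# Example policy (made up): linear backoff for fragment retries only
sleep_retry(lambda kind, n: 0.5 * n if kind == 'fragment' else 0, 'fragment', 1)
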
@@ -131,16 +120,23 @@ def _download_fragment(self, ctx, frag_url, info_dict, headers=None, request_dat
             'request_data': request_data,
             'ctx_id': ctx.get('ctx_id'),
         }
-        success = ctx['dl'].download(fragment_filename, fragment_info_dict)
+        success, _ = ctx['dl'].download(fragment_filename, fragment_info_dict)
         if not success:
-            return False, None
+            return False
         if fragment_info_dict.get('filetime'):
             ctx['fragment_filetime'] = fragment_info_dict.get('filetime')
         ctx['fragment_filename_sanitized'] = fragment_filename
-        return True, self._read_fragment(ctx)
+        return True
 
     def _read_fragment(self, ctx):
-        down, frag_sanitized = self.sanitize_open(ctx['fragment_filename_sanitized'], 'rb')
+        if not ctx.get('fragment_filename_sanitized'):
+            return None
+        try:
+            down, frag_sanitized = self.sanitize_open(ctx['fragment_filename_sanitized'], 'rb')
+        except FileNotFoundError:
+            if ctx.get('live'):
+                return None
+            raise
         ctx['fragment_filename_sanitized'] = frag_sanitized
         frag_content = down.read()
         down.close()
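
With this split, _download_fragment() only reports success as a boolean, and the bytes are fetched separately through _read_fragment(), which returns None rather than raising when a live fragment's temporary file has already vanished. A self-contained sketch of that read behaviour (simplified, not the literal downloader code):

import os


def read_fragment(path, is_live):
    # Return the fragment bytes; for live streams a missing file is
    # tolerated (the fragment may have expired), otherwise re-raise.
    try:
        with open(path, 'rb') as f:
            return f.read()
    except FileNotFoundError:
        if is_live:
            return None
        raise


assert read_fragment('/nonexistent/frag.ts', is_live=True) is None
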
@@ -154,7 +150,7 @@ def _append_fragment(self, ctx, frag_content):
             if self.__do_ytdl_file(ctx):
                 self._write_ytdl_file(ctx)
             if not self.params.get('keep_fragments', False):
-                os.remove(encodeFilename(ctx['fragment_filename_sanitized']))
+                self.try_remove(encodeFilename(ctx['fragment_filename_sanitized']))
             del ctx['fragment_filename_sanitized']
 
     def _prepare_frag_download(self, ctx):
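
self.try_remove() replaces the bare os.remove() so a missing or locked fragment file no longer aborts the append step. Its exact behaviour is defined on yt-dlp's FileDownloader; purely as an illustrative sketch, a tolerant remove helper generally looks like:

import os


def try_remove(path, warn=print):
    # Best-effort removal: warn instead of raising on failure.
    # (Illustrative only; yt-dlp defines its own try_remove.)
    try:
        os.remove(path)
    except OSError as err:
        warn(f'Unable to remove {path}: {err}')


try_remove('/tmp/definitely-not-there.part')  # warns, does not raise
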
@@ -167,21 +163,13 @@ def _prepare_frag_download(self, ctx):
                 total_frags_str += ' (not including %d ad)' % ad_frags
         else:
             total_frags_str = 'unknown (live)'
-        self.to_screen(
-            '[%s] Total fragments: %s' % (self.FD_NAME, total_frags_str))
+        self.to_screen(f'[{self.FD_NAME}] Total fragments: {total_frags_str}')
         self.report_destination(ctx['filename'])
-        dl = HttpQuietDownloader(
-            self.ydl,
-            {
-                'continuedl': True,
-                'quiet': self.params.get('quiet'),
-                'noprogress': True,
-                'ratelimit': self.params.get('ratelimit'),
-                'retries': self.params.get('retries', 0),
-                'nopart': self.params.get('nopart', False),
-                'test': self.params.get('test', False),
-            }
-        )
+        dl = HttpQuietDownloader(self.ydl, {
+            **self.params,
+            'noprogress': True,
+            'test': False,
+        })
         tmpfilename = self.temp_name(ctx['filename'])
         open_mode = 'wb'
         resume_len = 0
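
Instead of hand-picking individual options, the quiet downloader now copies the caller's full parameter dict and overrides only the entries that must differ. The `{**base, 'key': override}` idiom it relies on:

base_params = {'quiet': False, 'ratelimit': 1024, 'noprogress': False, 'test': True}

quiet_params = {
    **base_params,       # inherit everything the user configured ...
    'noprogress': True,  # ... but never draw a second progress bar
    'test': False,       # and never truncate individual fragment downloads
}

assert quiet_params['ratelimit'] == 1024   # inherited
assert quiet_params['noprogress'] is True  # overridden
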
@@ -254,6 +242,9 @@ def frag_progress_hook(s):
             if s['status'] not in ('downloading', 'finished'):
                 return
 
+            if not total_frags and ctx.get('fragment_count'):
+                state['fragment_count'] = ctx['fragment_count']
+
             if ctx_id is not None and s.get('ctx_id') != ctx_id:
                 return
 
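
This hunk is the core of #3493: for live streams, where no fixed total is known up front, the progress hook now copies a running fragment_count from the download context into the progress state so the count can still be shown. A toy version of that hand-off, with plain dicts standing in for the real ctx/state objects:

def update_progress_state(state, ctx, total_frags=None):
    # When no fixed total is known (live), surface whatever running
    # count the context currently reports.
    if not total_frags and ctx.get('fragment_count'):
        state['fragment_count'] = ctx['fragment_count']
    return state


state = {'status': 'downloading', 'fragment_index': 41}
ctx = {'fragment_count': 42}  # grows as the live manifest is refreshed
print(update_progress_state(state, ctx))
# {'status': 'downloading', 'fragment_index': 41, 'fragment_count': 42}
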
@@ -300,7 +291,7 @@ def _finish_frag_download(self, ctx, info_dict):
         if self.__do_ytdl_file(ctx):
             ytdl_filename = encodeFilename(self.ytdl_filename(ctx['filename']))
             if os.path.isfile(ytdl_filename):
-                os.remove(ytdl_filename)
+                self.try_remove(ytdl_filename)
         elapsed = time.time() - ctx['started']
 
         if ctx['tmpfilename'] == '-':
@@ -310,10 +301,8 @@ def _finish_frag_download(self, ctx, info_dict):
             if self.params.get('updatetime', True):
                 filetime = ctx.get('fragment_filetime')
                 if filetime:
-                    try:
+                    with contextlib.suppress(Exception):
                         os.utime(ctx['filename'], (time.time(), filetime))
-                    except Exception:
-                        pass
             downloaded_bytes = os.path.getsize(encodeFilename(ctx['filename']))
 
         self._hook_progress({
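
contextlib.suppress(Exception) is a one-line stand-in for the removed try/except-pass around os.utime(); the two forms below are equivalent:

import contextlib
import os
import time

# Before: swallow any error while copying the fragment mtime
try:
    os.utime('maybe-missing-file', (time.time(), time.time()))
except Exception:
    pass

# After: same effect, less nesting
with contextlib.suppress(Exception):
    os.utime('maybe-missing-file', (time.time(), time.time()))
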
@@ -337,8 +326,7 @@ def _prepare_external_frag_download(self, ctx):
                 total_frags_str += ' (not including %d ad)' % ad_frags
         else:
             total_frags_str = 'unknown (live)'
-        self.to_screen(
-            '[%s] Total fragments: %s' % (self.FD_NAME, total_frags_str))
+        self.to_screen(f'[{self.FD_NAME}] Total fragments: {total_frags_str}')
 
         tmpfilename = self.temp_name(ctx['filename'])
 
@@ -398,7 +386,7 @@ def __exit__(self, exc_type, exc_val, exc_tb):
                 pass
 
         if compat_os_name == 'nt':
-            def bindoj_result(future):
+            def future_result(future):
                 while True:
                     try:
                         return future.result(0.1)
@@ -407,7 +395,7 @@ def bindoj_result(future):
                     except concurrent.futures.TimeoutError:
                         continue
         else:
-            def bindoj_result(future):
+            def future_result(future):
                 return future.result()
 
         def interrupt_trigger_iter(fg):
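
The renamed future_result() keeps its platform split: on Windows, a plain future.result() can block in a way that delays Ctrl-C delivery, so the helper polls with a short timeout and retries until the future completes. A self-contained sketch of that polling loop:

import concurrent.futures
import time


def future_result(future, poll=True):
    # With polling enabled, wake up every 100 ms so a KeyboardInterrupt
    # can be delivered between result() calls.
    if not poll:
        return future.result()
    while True:
        try:
            return future.result(0.1)
        except concurrent.futures.TimeoutError:
            continue


with concurrent.futures.ThreadPoolExecutor() as pool:
    fut = pool.submit(lambda: (time.sleep(0.3), 'done')[-1])
    print(future_result(fut))  # 'done'
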
@@ -425,7 +413,7 @@ def interrupt_trigger_iter(fg):
         result = True
         for tpe, job in spins:
             try:
-                result = result and bindoj_result(job)
+                result = result and future_result(job)
             except KeyboardInterrupt:
                 interrupt_trigger[0] = False
             finally:
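
interrupt_trigger acts as a shared, mutable flag: a Ctrl-C caught in this loop flips it to False, and the generator feeding fragments to each download stops yielding. A toy version of that cooperative-cancellation pattern:

interrupt_trigger = [True]


def interrupt_trigger_iter(fragments):
    # Stop handing out fragments as soon as the shared flag is cleared.
    for fragment in fragments:
        if not interrupt_trigger[0]:
            break
        yield fragment


gen = interrupt_trigger_iter(iter(range(5)))
print(next(gen))            # 0
interrupt_trigger[0] = False
print(list(gen))            # [] -- remaining fragments are skipped
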
@@ -452,7 +440,7 @@ def download_and_append_fragments(
 
         def download_fragment(fragment, ctx):
             if not interrupt_trigger[0]:
-                return False, fragment['frag_index']
+                return
 
             frag_index = ctx['fragment_index'] = fragment['frag_index']
             ctx['last_error'] = None
@@ -462,14 +450,13 @@ def download_fragment(fragment, ctx):
                 headers['Range'] = 'bytes=%d-%d' % (byte_range['start'], byte_range['end'] - 1)
 
             # Never skip the first fragment
-            fatal = is_fatal(fragment.get('index') or (frag_index - 1))
-            count, frag_content = 0, None
+            fatal, count = is_fatal(fragment.get('index') or (frag_index - 1)), 0
             while count <= fragment_retries:
                 try:
-                    success, frag_content = self._download_fragment(ctx, fragment['url'], info_dict, headers)
-                    if not success:
-                        return False, frag_index
-                    break
+                    ctx['fragment_count'] = fragment.get('fragment_count')
+                    if self._download_fragment(ctx, fragment['url'], info_dict, headers):
+                        break
+                    return
                 except (compat_urllib_error.HTTPError, http.client.IncompleteRead) as err:
                     # Unavailable (possibly temporary) fragments may be served.
                     # First we try to retry then either skip or abort.
@@ -486,52 +473,59 @@ def download_fragment(fragment, ctx):
                         break
                     raise
 
-            if count > fragment_retries:
-                if not fatal:
-                    return False, frag_index
+            if count > fragment_retries and fatal:
                 ctx['dest_stream'].close()
                 self.report_error('Giving up after %s fragment retries' % fragment_retries)
-                return False, frag_index
-            return frag_content, frag_index
 
         def append_fragment(frag_content, frag_index, ctx):
-            if not frag_content:
-                if not is_fatal(frag_index - 1):
-                    self.report_skip_fragment(frag_index, 'fragment not found')
-                    return True
-                else:
-                    ctx['dest_stream'].close()
-                    self.report_error(
-                        'fragment %s not found, unable to continue' % frag_index)
-                    return False
-            self._append_fragment(ctx, pack_func(frag_content, frag_index))
+            if frag_content:
+                self._append_fragment(ctx, pack_func(frag_content, frag_index))
+            elif not is_fatal(frag_index - 1):
+                self.report_skip_fragment(frag_index, 'fragment not found')
+            else:
+                ctx['dest_stream'].close()
+                self.report_error(f'fragment {frag_index} not found, unable to continue')
+                return False
             return True
 
         decrypt_fragment = self.decrypter(info_dict)
 
         max_workers = math.ceil(
             self.params.get('concurrent_fragment_downloads', 1) / ctx.get('max_progress', 1))
-        if can_threaded_download and max_workers > 1:
-
+        if max_workers > 1:
             def _download_fragment(fragment):
                 ctx_copy = ctx.copy()
-                frag_content, frag_index = download_fragment(fragment, ctx_copy)
-                return fragment, frag_content, frag_index, ctx_copy.get('fragment_filename_sanitized')
+                download_fragment(fragment, ctx_copy)
+                return fragment, fragment['frag_index'], ctx_copy.get('fragment_filename_sanitized')
 
             self.report_warning('The download speed shown is only of one thread. This is a known issue and patches are welcome')
             with tpe or concurrent.futures.ThreadPoolExecutor(max_workers) as pool:
-                for fragment, frag_content, frag_index, frag_filename in pool.map(_download_fragment, fragments):
-                    ctx['fragment_filename_sanitized'] = frag_filename
-                    ctx['fragment_index'] = frag_index
-                    result = append_fragment(decrypt_fragment(fragment, frag_content), frag_index, ctx)
-                    if not result:
-                        return False
+                try:
+                    for fragment, frag_index, frag_filename in pool.map(_download_fragment, fragments):
+                        ctx.update({
+                            'fragment_filename_sanitized': frag_filename,
+                            'fragment_index': frag_index,
+                        })
+                        if not append_fragment(decrypt_fragment(fragment, self._read_fragment(ctx)), frag_index, ctx):
+                            return False
+                except KeyboardInterrupt:
+                    self._finish_multiline_status()
+                    self.report_error(
+                        'Interrupted by user. Waiting for all threads to shutdown...', is_error=False, tb=False)
+                    pool.shutdown(wait=False)
+                    raise
         else:
             for fragment in fragments:
                 if not interrupt_trigger[0]:
                     break
-                frag_content, frag_index = download_fragment(fragment, ctx)
-                result = append_fragment(decrypt_fragment(fragment, frag_content), frag_index, ctx)
+                try:
+                    download_fragment(fragment, ctx)
+                    result = append_fragment(
+                        decrypt_fragment(fragment, self._read_fragment(ctx)), fragment['frag_index'], ctx)
+                except KeyboardInterrupt:
+                    if info_dict.get('is_live'):
+                        break
+                    raise
                 if not result:
                     return False
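
The threaded branch now wraps pool.map() so a Ctrl-C tears down the progress display, asks the executor to shut down without waiting, and then re-raises. A minimal standalone model of that control flow, with generic fetch/run names rather than the downloader's own:

import concurrent.futures
import time


def fetch(index):
    time.sleep(0.05)
    return index


def run(indices, max_workers=4):
    with concurrent.futures.ThreadPoolExecutor(max_workers) as pool:
        try:
            for result in pool.map(fetch, indices):
                print(f'appended fragment {result}')
        except KeyboardInterrupt:
            print('Interrupted by user. Waiting for all threads to shutdown...')
            # Stop accepting new work and return immediately; the context
            # manager still joins worker threads on exit, then we re-raise.
            pool.shutdown(wait=False)
            raise


if __name__ == '__main__':
    run(range(8))
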