# yt_dlp/downloader/fragment.py
import concurrent.futures
import contextlib
import http.client
import json
import math
import os
import time

from .common import FileDownloader
from .http import HttpFD
from ..aes import aes_cbc_decrypt_bytes, unpad_pkcs7
from ..compat import compat_os_name, compat_struct_pack, compat_urllib_error
from ..utils import (
    DownloadError,
    encodeFilename,
    error_to_compat_str,
    sanitized_Request,
    traverse_obj,
)

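# Used for downloading individual fragments: overrides to_screen/console_title with
# no-ops so that FragmentFD can report aggregate progress across fragments itself.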
class HttpQuietDownloader(HttpFD):
    def to_screen(self, *args, **kargs):
        pass

    console_title = to_screen

class FragmentFD(FileDownloader):
    """
    A base file downloader class for fragmented media (e.g. f4m/m3u8 manifests).

    Available options:

    fragment_retries:   Number of times to retry a fragment on HTTP error (DASH
                        and hlsnative only)
    skip_unavailable_fragments:
                        Skip unavailable fragments (DASH and hlsnative only)
    keep_fragments:     Keep downloaded fragments on disk after downloading is
                        finished
    concurrent_fragment_downloads:  The number of threads to use for native hls and dash downloads
    _no_ytdl_file:      Don't use .ytdl file

    For each incomplete fragment download yt-dlp keeps on disk a special
    bookkeeping file with download state and metadata (in the future such files
    will be used for any incomplete download handled by yt-dlp). This file is
    used to properly handle resuming, check download file consistency and detect
    potential errors. The file has a .ytdl extension and is a standard JSON file
    of the following format:

    extractor:
        Dictionary of extractor-related data. TBD.

    downloader:
        Dictionary of downloader-related data. May contain the following data:
            current_fragment:
                Dictionary with data on the fragment currently being downloaded:
                index:  0-based index of the current fragment among all fragments
            fragment_count:
                Total count of fragments

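    Example (illustrative values):

        {"downloader": {"current_fragment": {"index": 12}, "fragment_count": 140}}
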
    This feature is experimental and the file format may change in the future.
63 """
64
65 def report_retry_fragment(self, err, frag_index, count, retries):
66 self.to_screen(
67 '\r[download] Got server HTTP error: %s. Retrying fragment %d (attempt %d of %s) ...'
68 % (error_to_compat_str(err), frag_index, count, self.format_retries(retries)))
69 self.sleep_retry('fragment', count)
70
71 def report_skip_fragment(self, frag_index, err=None):
72 err = f' {err};' if err else ''
73 self.to_screen(f'[download]{err} Skipping fragment {frag_index:d} ...')
74
75 def _prepare_url(self, info_dict, url):
76 headers = info_dict.get('http_headers')
77 return sanitized_Request(url, None, headers) if headers else url
78
79 def _prepare_and_start_frag_download(self, ctx, info_dict):
80 self._prepare_frag_download(ctx)
81 self._start_frag_download(ctx, info_dict)
82
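    # The .ytdl bookkeeping file is skipped for live streams, when writing to stdout
    # ('-' as the temporary filename) and when the internal '_no_ytdl_file' option is set.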
    def __do_ytdl_file(self, ctx):
        return ctx['live'] is not True and ctx['tmpfilename'] != '-' and not self.params.get('_no_ytdl_file')

    def _read_ytdl_file(self, ctx):
        assert 'ytdl_corrupt' not in ctx
        stream, _ = self.sanitize_open(self.ytdl_filename(ctx['filename']), 'r')
        try:
            ytdl_data = json.loads(stream.read())
            ctx['fragment_index'] = ytdl_data['downloader']['current_fragment']['index']
            if 'extra_state' in ytdl_data['downloader']:
                ctx['extra_state'] = ytdl_data['downloader']['extra_state']
        except Exception:
            ctx['ytdl_corrupt'] = True
        finally:
            stream.close()

    def _write_ytdl_file(self, ctx):
        frag_index_stream, _ = self.sanitize_open(self.ytdl_filename(ctx['filename']), 'w')
        try:
            downloader = {
                'current_fragment': {
                    'index': ctx['fragment_index'],
                },
            }
            if 'extra_state' in ctx:
                downloader['extra_state'] = ctx['extra_state']
            if ctx.get('fragment_count') is not None:
                downloader['fragment_count'] = ctx['fragment_count']
            frag_index_stream.write(json.dumps({'downloader': downloader}))
        finally:
            frag_index_stream.close()

    def _download_fragment(self, ctx, frag_url, info_dict, headers=None, request_data=None):
        fragment_filename = '%s-Frag%d' % (ctx['tmpfilename'], ctx['fragment_index'])
        fragment_info_dict = {
            'url': frag_url,
            'http_headers': headers or info_dict.get('http_headers'),
            'request_data': request_data,
            'ctx_id': ctx.get('ctx_id'),
        }
        success, _ = ctx['dl'].download(fragment_filename, fragment_info_dict)
        if not success:
            return False
        if fragment_info_dict.get('filetime'):
            ctx['fragment_filetime'] = fragment_info_dict.get('filetime')
        ctx['fragment_filename_sanitized'] = fragment_filename
        return True

    def _read_fragment(self, ctx):
        if not ctx.get('fragment_filename_sanitized'):
            return None
        try:
            down, frag_sanitized = self.sanitize_open(ctx['fragment_filename_sanitized'], 'rb')
        except FileNotFoundError:
            if ctx.get('live'):
                return None
            raise
        ctx['fragment_filename_sanitized'] = frag_sanitized
        frag_content = down.read()
        down.close()
        return frag_content

    def _append_fragment(self, ctx, frag_content):
        try:
            ctx['dest_stream'].write(frag_content)
            ctx['dest_stream'].flush()
        finally:
            if self.__do_ytdl_file(ctx):
                self._write_ytdl_file(ctx)
            if not self.params.get('keep_fragments', False):
                self.try_remove(encodeFilename(ctx['fragment_filename_sanitized']))
            del ctx['fragment_filename_sanitized']

    def _prepare_frag_download(self, ctx):
        if 'live' not in ctx:
            ctx['live'] = False
        if not ctx['live']:
            total_frags_str = '%d' % ctx['total_frags']
            ad_frags = ctx.get('ad_frags', 0)
            if ad_frags:
                total_frags_str += ' (not including %d ad)' % ad_frags
        else:
            total_frags_str = 'unknown (live)'
        self.to_screen(f'[{self.FD_NAME}] Total fragments: {total_frags_str}')
        self.report_destination(ctx['filename'])
        dl = HttpQuietDownloader(self.ydl, {
            **self.params,
            'noprogress': True,
            'test': False,
        })
        tmpfilename = self.temp_name(ctx['filename'])
        open_mode = 'wb'
        resume_len = 0

        # Establish possible resume length
        if os.path.isfile(encodeFilename(tmpfilename)):
            open_mode = 'ab'
            resume_len = os.path.getsize(encodeFilename(tmpfilename))

        # Should be initialized before ytdl file check
        ctx.update({
            'tmpfilename': tmpfilename,
            'fragment_index': 0,
        })

        if self.__do_ytdl_file(ctx):
            if os.path.isfile(encodeFilename(self.ytdl_filename(ctx['filename']))):
                self._read_ytdl_file(ctx)
                is_corrupt = ctx.get('ytdl_corrupt') is True
                is_inconsistent = ctx['fragment_index'] > 0 and resume_len == 0
                if is_corrupt or is_inconsistent:
                    message = (
                        '.ytdl file is corrupt' if is_corrupt else
                        'Inconsistent state of incomplete fragment download')
                    self.report_warning(
                        '%s. Restarting from the beginning ...' % message)
                    ctx['fragment_index'] = resume_len = 0
                    if 'ytdl_corrupt' in ctx:
                        del ctx['ytdl_corrupt']
                    self._write_ytdl_file(ctx)
            else:
                self._write_ytdl_file(ctx)
                assert ctx['fragment_index'] == 0

        dest_stream, tmpfilename = self.sanitize_open(tmpfilename, open_mode)

        ctx.update({
            'dl': dl,
            'dest_stream': dest_stream,
            'tmpfilename': tmpfilename,
            # Total size of complete fragments downloaded so far, in bytes
            'complete_frags_downloaded_bytes': resume_len,
        })

    def _start_frag_download(self, ctx, info_dict):
        resume_len = ctx['complete_frags_downloaded_bytes']
        total_frags = ctx['total_frags']
        ctx_id = ctx.get('ctx_id')
        # This dict stores the download progress; it is updated by the progress hook
        state = {
            'status': 'downloading',
            'downloaded_bytes': resume_len,
            'fragment_index': ctx['fragment_index'],
            'fragment_count': total_frags,
            'filename': ctx['filename'],
            'tmpfilename': ctx['tmpfilename'],
        }

        start = time.time()
        ctx.update({
            'started': start,
            'fragment_started': start,
            # Bytes of the current fragment downloaded as of the previous
            # progress hook invocation
            'prev_frag_downloaded_bytes': 0,
        })

        def frag_progress_hook(s):
            if s['status'] not in ('downloading', 'finished'):
                return

            if ctx_id is not None and s.get('ctx_id') != ctx_id:
                return

            state['max_progress'] = ctx.get('max_progress')
            state['progress_idx'] = ctx.get('progress_idx')

            time_now = time.time()
            state['elapsed'] = time_now - start
            frag_total_bytes = s.get('total_bytes') or 0
            s['fragment_info_dict'] = s.pop('info_dict', {})
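            # Rough total-size estimate, assuming fragments are of roughly similar size:
            # average bytes per fragment seen so far, scaled up to the total fragment count.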
            if not ctx['live']:
                estimated_size = (
                    (ctx['complete_frags_downloaded_bytes'] + frag_total_bytes)
                    / (state['fragment_index'] + 1) * total_frags)
                state['total_bytes_estimate'] = estimated_size

            if s['status'] == 'finished':
                state['fragment_index'] += 1
                ctx['fragment_index'] = state['fragment_index']
                state['downloaded_bytes'] += frag_total_bytes - ctx['prev_frag_downloaded_bytes']
                ctx['complete_frags_downloaded_bytes'] = state['downloaded_bytes']
                ctx['speed'] = state['speed'] = self.calc_speed(
                    ctx['fragment_started'], time_now, frag_total_bytes)
                ctx['fragment_started'] = time.time()
                ctx['prev_frag_downloaded_bytes'] = 0
            else:
                frag_downloaded_bytes = s['downloaded_bytes']
                state['downloaded_bytes'] += frag_downloaded_bytes - ctx['prev_frag_downloaded_bytes']
                if not ctx['live']:
                    state['eta'] = self.calc_eta(
                        start, time_now, estimated_size - resume_len,
                        state['downloaded_bytes'] - resume_len)
                ctx['speed'] = state['speed'] = self.calc_speed(
                    ctx['fragment_started'], time_now, frag_downloaded_bytes)
                ctx['prev_frag_downloaded_bytes'] = frag_downloaded_bytes
            self._hook_progress(state, info_dict)

        ctx['dl'].add_progress_hook(frag_progress_hook)

        return start

    def _finish_frag_download(self, ctx, info_dict):
        ctx['dest_stream'].close()
        if self.__do_ytdl_file(ctx):
            ytdl_filename = encodeFilename(self.ytdl_filename(ctx['filename']))
            if os.path.isfile(ytdl_filename):
                self.try_remove(ytdl_filename)
        elapsed = time.time() - ctx['started']

        if ctx['tmpfilename'] == '-':
            downloaded_bytes = ctx['complete_frags_downloaded_bytes']
        else:
            self.try_rename(ctx['tmpfilename'], ctx['filename'])
            if self.params.get('updatetime', True):
                filetime = ctx.get('fragment_filetime')
                if filetime:
                    with contextlib.suppress(Exception):
                        os.utime(ctx['filename'], (time.time(), filetime))
            downloaded_bytes = os.path.getsize(encodeFilename(ctx['filename']))

        self._hook_progress({
            'downloaded_bytes': downloaded_bytes,
            'total_bytes': downloaded_bytes,
            'filename': ctx['filename'],
            'status': 'finished',
            'elapsed': elapsed,
            'ctx_id': ctx.get('ctx_id'),
            'max_progress': ctx.get('max_progress'),
            'progress_idx': ctx.get('progress_idx'),
        }, info_dict)

    def _prepare_external_frag_download(self, ctx):
        if 'live' not in ctx:
            ctx['live'] = False
        if not ctx['live']:
            total_frags_str = '%d' % ctx['total_frags']
            ad_frags = ctx.get('ad_frags', 0)
            if ad_frags:
                total_frags_str += ' (not including %d ad)' % ad_frags
        else:
            total_frags_str = 'unknown (live)'
        self.to_screen(f'[{self.FD_NAME}] Total fragments: {total_frags_str}')

        tmpfilename = self.temp_name(ctx['filename'])

        # Should be initialized before ytdl file check
        ctx.update({
            'tmpfilename': tmpfilename,
            'fragment_index': 0,
        })

    def decrypter(self, info_dict):
        _key_cache = {}

        def _get_key(url):
            if url not in _key_cache:
                _key_cache[url] = self.ydl.urlopen(self._prepare_url(info_dict, url)).read()
            return _key_cache[url]

        def decrypt_fragment(fragment, frag_content):
            decrypt_info = fragment.get('decrypt_info')
            if not decrypt_info or decrypt_info['METHOD'] != 'AES-128':
                return frag_content
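            # HLS default: when the playlist gives no IV, the fragment's media sequence
            # number is used as a 128-bit big-endian IV; '>8xq' packs it as 8 zero bytes
            # followed by an 8-byte big-endian integer, e.g.
            # compat_struct_pack('>8xq', 7) == b'\x00' * 15 + b'\x07'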
            iv = decrypt_info.get('IV') or compat_struct_pack('>8xq', fragment['media_sequence'])
            decrypt_info['KEY'] = decrypt_info.get('KEY') or _get_key(info_dict.get('_decryption_key_url') or decrypt_info['URI'])
            # Don't decrypt the content in tests since the data is explicitly truncated and is
            # not a multiple of the AES block size (see https://github.com/ytdl-org/youtube-dl/pull/27660).
            # Tests only care that the correct data was downloaded, not what it decrypts to.
            if self.params.get('test', False):
                return frag_content
            return unpad_pkcs7(aes_cbc_decrypt_bytes(frag_content, decrypt_info['KEY'], iv))

        return decrypt_fragment

    def download_and_append_fragments_multiple(self, *args, pack_func=None, finish_func=None):
        '''
        @params (ctx1, fragments1, info_dict1), (ctx2, fragments2, info_dict2), ...
        each arg must be a tuple or a list
        '''
        interrupt_trigger = [True]
        max_progress = len(args)
        if max_progress == 1:
            return self.download_and_append_fragments(*args[0], pack_func=pack_func, finish_func=finish_func)
        max_workers = self.params.get('concurrent_fragment_downloads', 1)
        if max_progress > 1:
            self._prepare_multiline_status(max_progress)
        is_live = any(traverse_obj(args, (..., 2, 'is_live'), default=[]))

        def thread_func(idx, ctx, fragments, info_dict, tpe):
            ctx['max_progress'] = max_progress
            ctx['progress_idx'] = idx
            return self.download_and_append_fragments(
                ctx, fragments, info_dict, pack_func=pack_func, finish_func=finish_func,
                tpe=tpe, interrupt_trigger=interrupt_trigger)

        class FTPE(concurrent.futures.ThreadPoolExecutor):
            # The default __exit__ would wait on the pool's own worker thread (which is
            # running thread_func) and so block on itself; make it a no-op instead
            def __exit__(self, exc_type, exc_val, exc_tb):
                pass

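        # On Windows, a plain future.result() can block in a way that never surfaces
        # KeyboardInterrupt; polling with a short timeout keeps Ctrl+C responsive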
        if compat_os_name == 'nt':
            def future_result(future):
                while True:
                    try:
                        return future.result(0.1)
                    except KeyboardInterrupt:
                        raise
                    except concurrent.futures.TimeoutError:
                        continue
        else:
            def future_result(future):
                return future.result()

        def interrupt_trigger_iter(fg):
            for f in fg:
                if not interrupt_trigger[0]:
                    break
                yield f

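        # One executor per (ctx, fragments, info_dict) tuple: thread_func runs in that
        # executor and also reuses it for the format's fragment workers, so each format
        # ends up with roughly max_workers / max_progress download threads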
        spins = []
        for idx, (ctx, fragments, info_dict) in enumerate(args):
            tpe = FTPE(math.ceil(max_workers / max_progress))
            job = tpe.submit(thread_func, idx, ctx, interrupt_trigger_iter(fragments), info_dict, tpe)
            spins.append((tpe, job))

        result = True
        for tpe, job in spins:
            try:
                result = result and future_result(job)
            except KeyboardInterrupt:
                interrupt_trigger[0] = False
            finally:
                tpe.shutdown(wait=True)
        if not interrupt_trigger[0] and not is_live:
            raise KeyboardInterrupt()
        # We expect the user wants to stop but DOES want the postprocessors to run;
        # so on live streams return an intermediate result here instead of raising KeyboardInterrupt
        return result

    def download_and_append_fragments(
            self, ctx, fragments, info_dict, *, pack_func=None, finish_func=None,
            tpe=None, interrupt_trigger=None):
        if not interrupt_trigger:
            interrupt_trigger = (True, )

        fragment_retries = self.params.get('fragment_retries', 0)
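        # A missing fragment is only fatal when it cannot be skipped: never for live streams,
        # only for the very first fragment when skip_unavailable_fragments is enabled,
        # and for every fragment when it is disabled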
        is_fatal = (
            ((lambda _: False) if info_dict.get('is_live') else (lambda idx: idx == 0))
            if self.params.get('skip_unavailable_fragments', True) else (lambda _: True))

        if not pack_func:
            pack_func = lambda frag_content, _: frag_content

        def download_fragment(fragment, ctx):
            if not interrupt_trigger[0]:
                return

            frag_index = ctx['fragment_index'] = fragment['frag_index']
            ctx['last_error'] = None
            headers = info_dict.get('http_headers', {}).copy()
            byte_range = fragment.get('byte_range')
            if byte_range:
                headers['Range'] = 'bytes=%d-%d' % (byte_range['start'], byte_range['end'] - 1)

            # Never skip the first fragment
            fatal, count = is_fatal(fragment.get('index') or (frag_index - 1)), 0
            while count <= fragment_retries:
                try:
                    if self._download_fragment(ctx, fragment['url'], info_dict, headers):
                        break
                    return
                except (compat_urllib_error.HTTPError, http.client.IncompleteRead) as err:
                    # Unavailable (possibly temporary) fragments may be served.
                    # First retry, then either skip or abort.
                    # See https://github.com/ytdl-org/youtube-dl/issues/10165 and
                    # https://github.com/ytdl-org/youtube-dl/issues/10448.
                    count += 1
                    ctx['last_error'] = err
                    if count <= fragment_retries:
                        self.report_retry_fragment(err, frag_index, count, fragment_retries)
                except DownloadError:
                    # Don't retry a fragment if the error occurred during the HTTP download
                    # itself, since that has its own retry settings
                    if not fatal:
                        break
                    raise

            if count > fragment_retries and fatal:
                ctx['dest_stream'].close()
                self.report_error('Giving up after %s fragment retries' % fragment_retries)

        def append_fragment(frag_content, frag_index, ctx):
            if frag_content:
                self._append_fragment(ctx, pack_func(frag_content, frag_index))
            elif not is_fatal(frag_index - 1):
                self.report_skip_fragment(frag_index, 'fragment not found')
            else:
                ctx['dest_stream'].close()
                self.report_error(f'fragment {frag_index} not found, unable to continue')
                return False
            return True

        decrypt_fragment = self.decrypter(info_dict)

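        # Split the overall thread budget evenly across the concurrent progress instances
        # (e.g. a video format and an audio format being downloaded at the same time)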
        max_workers = math.ceil(
            self.params.get('concurrent_fragment_downloads', 1) / ctx.get('max_progress', 1))
        if max_workers > 1:
            def _download_fragment(fragment):
                ctx_copy = ctx.copy()
                download_fragment(fragment, ctx_copy)
                return fragment, fragment['frag_index'], ctx_copy.get('fragment_filename_sanitized')

            self.report_warning('The download speed shown is only that of one thread. This is a known issue and patches are welcome')
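            # pool.map yields results in submission order, so fragments are still appended
            # to the output sequentially even though they download concurrently; each worker
            # operates on a shallow copy of ctx so per-fragment state is not clobbered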
            with tpe or concurrent.futures.ThreadPoolExecutor(max_workers) as pool:
                try:
                    for fragment, frag_index, frag_filename in pool.map(_download_fragment, fragments):
                        ctx.update({
                            'fragment_filename_sanitized': frag_filename,
                            'fragment_index': frag_index,
                        })
                        if not append_fragment(decrypt_fragment(fragment, self._read_fragment(ctx)), frag_index, ctx):
                            return False
                except KeyboardInterrupt:
                    self._finish_multiline_status()
                    self.report_error(
                        'Interrupted by user. Waiting for all threads to shutdown...', is_error=False, tb=False)
                    pool.shutdown(wait=False)
                    raise
        else:
            for fragment in fragments:
                if not interrupt_trigger[0]:
                    break
                try:
                    download_fragment(fragment, ctx)
                    result = append_fragment(
                        decrypt_fragment(fragment, self._read_fragment(ctx)), fragment['frag_index'], ctx)
                except KeyboardInterrupt:
                    if info_dict.get('is_live'):
                        break
                    raise
                if not result:
                    return False

        if finish_func is not None:
            ctx['dest_stream'].write(finish_func())
            ctx['dest_stream'].flush()
        self._finish_frag_download(ctx, info_dict)
        return True