]> jfr.im git - yt-dlp.git/blob - yt_dlp/downloader/fragment.py
[cleanup] Add more ruff rules (#10149)
[yt-dlp.git] / yt_dlp / downloader / fragment.py
1 import concurrent.futures
2 import contextlib
3 import json
4 import math
5 import os
6 import struct
7 import time
8
9 from .common import FileDownloader
10 from .http import HttpFD
11 from ..aes import aes_cbc_decrypt_bytes, unpad_pkcs7
12 from ..compat import compat_os_name
13 from ..networking import Request
14 from ..networking.exceptions import HTTPError, IncompleteRead
15 from ..utils import DownloadError, RetryManager, encodeFilename, traverse_obj
16 from ..utils.networking import HTTPHeaderDict
17 from ..utils.progress import ProgressCalculator
18
19
class HttpQuietDownloader(HttpFD):
    """An HttpFD variant that swallows all screen and console-title output.

    Used internally by FragmentFD so per-fragment downloads do not spam
    the terminal; progress is reported by the fragment downloader itself.
    """

    def to_screen(self, *args, **kwargs):
        # Deliberately discard everything
        pass

    # Console-title updates are silenced the same way
    to_console_title = to_screen
25
26
27 class FragmentFD(FileDownloader):
28 """
29 A base file downloader class for fragmented media (e.g. f4m/m3u8 manifests).
30
31 Available options:
32
33 fragment_retries: Number of times to retry a fragment for HTTP error
34 (DASH and hlsnative only). Default is 0 for API, but 10 for CLI
35 skip_unavailable_fragments:
36 Skip unavailable fragments (DASH and hlsnative only)
37 keep_fragments: Keep downloaded fragments on disk after downloading is
38 finished
39 concurrent_fragment_downloads: The number of threads to use for native hls and dash downloads
40 _no_ytdl_file: Don't use .ytdl file
41
42 For each incomplete fragment download yt-dlp keeps on disk a special
43 bookkeeping file with download state and metadata (in future such files will
44 be used for any incomplete download handled by yt-dlp). This file is
45 used to properly handle resuming, check download file consistency and detect
46 potential errors. The file has a .ytdl extension and represents a standard
47 JSON file of the following format:
48
49 extractor:
50 Dictionary of extractor related data. TBD.
51
52 downloader:
53 Dictionary of downloader related data. May contain following data:
54 current_fragment:
55 Dictionary with current (being downloaded) fragment data:
56 index: 0-based index of current fragment among all fragments
57 fragment_count:
58 Total count of fragments
59
60 This feature is experimental and file format may change in future.
61 """
62
63 def report_retry_fragment(self, err, frag_index, count, retries):
64 self.deprecation_warning('yt_dlp.downloader.FragmentFD.report_retry_fragment is deprecated. '
65 'Use yt_dlp.downloader.FileDownloader.report_retry instead')
66 return self.report_retry(err, count, retries, frag_index)
67
68 def report_skip_fragment(self, frag_index, err=None):
69 err = f' {err};' if err else ''
70 self.to_screen(f'[download]{err} Skipping fragment {frag_index:d} ...')
71
72 def _prepare_url(self, info_dict, url):
73 headers = info_dict.get('http_headers')
74 return Request(url, None, headers) if headers else url
75
    def _prepare_and_start_frag_download(self, ctx, info_dict):
        # Convenience wrapper: set up the download context (destination file,
        # inner HTTP downloader, .ytdl resume state), then install the
        # progress hook that tracks per-fragment progress.
        self._prepare_frag_download(ctx)
        self._start_frag_download(ctx, info_dict)
79
80 def __do_ytdl_file(self, ctx):
81 return ctx['live'] is not True and ctx['tmpfilename'] != '-' and not self.params.get('_no_ytdl_file')
82
83 def _read_ytdl_file(self, ctx):
84 assert 'ytdl_corrupt' not in ctx
85 stream, _ = self.sanitize_open(self.ytdl_filename(ctx['filename']), 'r')
86 try:
87 ytdl_data = json.loads(stream.read())
88 ctx['fragment_index'] = ytdl_data['downloader']['current_fragment']['index']
89 if 'extra_state' in ytdl_data['downloader']:
90 ctx['extra_state'] = ytdl_data['downloader']['extra_state']
91 except Exception:
92 ctx['ytdl_corrupt'] = True
93 finally:
94 stream.close()
95
96 def _write_ytdl_file(self, ctx):
97 frag_index_stream, _ = self.sanitize_open(self.ytdl_filename(ctx['filename']), 'w')
98 try:
99 downloader = {
100 'current_fragment': {
101 'index': ctx['fragment_index'],
102 },
103 }
104 if 'extra_state' in ctx:
105 downloader['extra_state'] = ctx['extra_state']
106 if ctx.get('fragment_count') is not None:
107 downloader['fragment_count'] = ctx['fragment_count']
108 frag_index_stream.write(json.dumps({'downloader': downloader}))
109 finally:
110 frag_index_stream.close()
111
112 def _download_fragment(self, ctx, frag_url, info_dict, headers=None, request_data=None):
113 fragment_filename = '%s-Frag%d' % (ctx['tmpfilename'], ctx['fragment_index'])
114 fragment_info_dict = {
115 'url': frag_url,
116 'http_headers': headers or info_dict.get('http_headers'),
117 'request_data': request_data,
118 'ctx_id': ctx.get('ctx_id'),
119 }
120 frag_resume_len = 0
121 if ctx['dl'].params.get('continuedl', True):
122 frag_resume_len = self.filesize_or_none(self.temp_name(fragment_filename))
123 fragment_info_dict['frag_resume_len'] = ctx['frag_resume_len'] = frag_resume_len
124
125 success, _ = ctx['dl'].download(fragment_filename, fragment_info_dict)
126 if not success:
127 return False
128 if fragment_info_dict.get('filetime'):
129 ctx['fragment_filetime'] = fragment_info_dict.get('filetime')
130 ctx['fragment_filename_sanitized'] = fragment_filename
131 return True
132
133 def _read_fragment(self, ctx):
134 if not ctx.get('fragment_filename_sanitized'):
135 return None
136 try:
137 down, frag_sanitized = self.sanitize_open(ctx['fragment_filename_sanitized'], 'rb')
138 except FileNotFoundError:
139 if ctx.get('live'):
140 return None
141 raise
142 ctx['fragment_filename_sanitized'] = frag_sanitized
143 frag_content = down.read()
144 down.close()
145 return frag_content
146
147 def _append_fragment(self, ctx, frag_content):
148 try:
149 ctx['dest_stream'].write(frag_content)
150 ctx['dest_stream'].flush()
151 finally:
152 if self.__do_ytdl_file(ctx):
153 self._write_ytdl_file(ctx)
154 if not self.params.get('keep_fragments', False):
155 self.try_remove(encodeFilename(ctx['fragment_filename_sanitized']))
156 del ctx['fragment_filename_sanitized']
157
    def _prepare_frag_download(self, ctx):
        """Set up the download: report totals, create the quiet HTTP downloader,
        open the destination file (resuming when possible) and reconcile state
        with any existing .ytdl bookkeeping file."""
        if not ctx.setdefault('live', False):
            total_frags_str = '%d' % ctx['total_frags']
            ad_frags = ctx.get('ad_frags', 0)
            if ad_frags:
                total_frags_str += ' (not including %d ad)' % ad_frags
        else:
            # Live streams have no known fragment count
            total_frags_str = 'unknown (live)'
        self.to_screen(f'[{self.FD_NAME}] Total fragments: {total_frags_str}')
        self.report_destination(ctx['filename'])
        # Inner downloader is silenced; progress is reported via our own hook
        dl = HttpQuietDownloader(self.ydl, {
            **self.params,
            'noprogress': True,
            'test': False,
            'sleep_interval': 0,
            'max_sleep_interval': 0,
            'sleep_interval_subtitles': 0,
        })
        tmpfilename = self.temp_name(ctx['filename'])
        open_mode = 'wb'

        # Establish possible resume length; append mode if a partial file exists
        resume_len = self.filesize_or_none(tmpfilename)
        if resume_len > 0:
            open_mode = 'ab'

        # Should be initialized before ytdl file check
        ctx.update({
            'tmpfilename': tmpfilename,
            'fragment_index': 0,
        })

        if self.__do_ytdl_file(ctx):
            ytdl_file_exists = os.path.isfile(encodeFilename(self.ytdl_filename(ctx['filename'])))
            continuedl = self.params.get('continuedl', True)
            if continuedl and ytdl_file_exists:
                # Resuming: read saved state, then sanity-check it against the
                # partial file on disk
                self._read_ytdl_file(ctx)
                is_corrupt = ctx.get('ytdl_corrupt') is True
                # A recorded fragment index with no partial data means the two
                # are out of sync; restart rather than produce a broken file
                is_inconsistent = ctx['fragment_index'] > 0 and resume_len == 0
                if is_corrupt or is_inconsistent:
                    message = (
                        '.ytdl file is corrupt' if is_corrupt else
                        'Inconsistent state of incomplete fragment download')
                    self.report_warning(
                        f'{message}. Restarting from the beginning ...')
                    ctx['fragment_index'] = resume_len = 0
                    if 'ytdl_corrupt' in ctx:
                        del ctx['ytdl_corrupt']
                    self._write_ytdl_file(ctx)

            else:
                if not continuedl:
                    # Fresh start requested: discard any previous state
                    if ytdl_file_exists:
                        self._read_ytdl_file(ctx)
                    ctx['fragment_index'] = resume_len = 0
                self._write_ytdl_file(ctx)
                assert ctx['fragment_index'] == 0

        dest_stream, tmpfilename = self.sanitize_open(tmpfilename, open_mode)

        ctx.update({
            'dl': dl,
            'dest_stream': dest_stream,
            'tmpfilename': tmpfilename,
            # Total complete fragments downloaded so far in bytes
            'complete_frags_downloaded_bytes': resume_len,
        })
225
    def _start_frag_download(self, ctx, info_dict):
        """Install a progress hook on the inner downloader that aggregates
        per-fragment progress into overall download state.

        Returns the download start timestamp (also stored in ctx['started']).
        """
        resume_len = ctx['complete_frags_downloaded_bytes']
        total_frags = ctx['total_frags']
        ctx_id = ctx.get('ctx_id')
        # Stores the download progress, updated by the progress hook
        state = {
            'status': 'downloading',
            'downloaded_bytes': resume_len,
            'fragment_index': ctx['fragment_index'],
            'fragment_count': total_frags,
            'filename': ctx['filename'],
            'tmpfilename': ctx['tmpfilename'],
        }

        ctx['started'] = time.time()
        progress = ProgressCalculator(resume_len)

        def frag_progress_hook(s):
            # `s` is the inner downloader's per-fragment progress dict
            if s['status'] not in ('downloading', 'finished'):
                return

            # Fragment count may become known mid-download (e.g. live manifests)
            if not total_frags and ctx.get('fragment_count'):
                state['fragment_count'] = ctx['fragment_count']

            # Ignore events from other concurrent downloads sharing this hook
            if ctx_id is not None and s.get('ctx_id') != ctx_id:
                return

            state['max_progress'] = ctx.get('max_progress')
            state['progress_idx'] = ctx.get('progress_idx')

            state['elapsed'] = progress.elapsed
            frag_total_bytes = s.get('total_bytes') or 0
            # Keep the fragment's own info_dict separate from the main one
            s['fragment_info_dict'] = s.pop('info_dict', {})

            # XXX: Fragment resume is not accounted for here
            if not ctx['live']:
                # Extrapolate total size from the average fragment size so far
                estimated_size = (
                    (ctx['complete_frags_downloaded_bytes'] + frag_total_bytes)
                    / (state['fragment_index'] + 1) * total_frags)
                progress.total = estimated_size
                progress.update(s.get('downloaded_bytes'))
                state['total_bytes_estimate'] = progress.total
            else:
                progress.update(s.get('downloaded_bytes'))

            if s['status'] == 'finished':
                state['fragment_index'] += 1
                ctx['fragment_index'] = state['fragment_index']
                progress.thread_reset()

            state['downloaded_bytes'] = ctx['complete_frags_downloaded_bytes'] = progress.downloaded
            state['speed'] = ctx['speed'] = progress.speed.smooth
            state['eta'] = progress.eta.smooth

            self._hook_progress(state, info_dict)

        ctx['dl'].add_progress_hook(frag_progress_hook)

        return ctx['started']
285
    def _finish_frag_download(self, ctx, info_dict):
        """Finalize the download: close the destination, drop the .ytdl file,
        rename the temp file into place and fire the final progress hook.

        Returns False (after reporting an error) when nothing was downloaded.
        """
        ctx['dest_stream'].close()
        if self.__do_ytdl_file(ctx):
            # Download completed; the bookkeeping file is no longer needed
            self.try_remove(self.ytdl_filename(ctx['filename']))
        elapsed = time.time() - ctx['started']

        # '-' means we streamed to stdout; there is no file to rename
        to_file = ctx['tmpfilename'] != '-'
        if to_file:
            downloaded_bytes = self.filesize_or_none(ctx['tmpfilename'])
        else:
            downloaded_bytes = ctx['complete_frags_downloaded_bytes']

        if not downloaded_bytes:
            if to_file:
                self.try_remove(ctx['tmpfilename'])
            self.report_error('The downloaded file is empty')
            return False
        elif to_file:
            self.try_rename(ctx['tmpfilename'], ctx['filename'])
            filetime = ctx.get('fragment_filetime')
            if self.params.get('updatetime', True) and filetime:
                # Best-effort: setting the mtime may fail on some filesystems
                with contextlib.suppress(Exception):
                    os.utime(ctx['filename'], (time.time(), filetime))

        self._hook_progress({
            'downloaded_bytes': downloaded_bytes,
            'total_bytes': downloaded_bytes,
            'filename': ctx['filename'],
            'status': 'finished',
            'elapsed': elapsed,
            'ctx_id': ctx.get('ctx_id'),
            'max_progress': ctx.get('max_progress'),
            'progress_idx': ctx.get('progress_idx'),
        }, info_dict)
        return True
321
322 def _prepare_external_frag_download(self, ctx):
323 if 'live' not in ctx:
324 ctx['live'] = False
325 if not ctx['live']:
326 total_frags_str = '%d' % ctx['total_frags']
327 ad_frags = ctx.get('ad_frags', 0)
328 if ad_frags:
329 total_frags_str += ' (not including %d ad)' % ad_frags
330 else:
331 total_frags_str = 'unknown (live)'
332 self.to_screen(f'[{self.FD_NAME}] Total fragments: {total_frags_str}')
333
334 tmpfilename = self.temp_name(ctx['filename'])
335
336 # Should be initialized before ytdl file check
337 ctx.update({
338 'tmpfilename': tmpfilename,
339 'fragment_index': 0,
340 })
341
342 def decrypter(self, info_dict):
343 _key_cache = {}
344
345 def _get_key(url):
346 if url not in _key_cache:
347 _key_cache[url] = self.ydl.urlopen(self._prepare_url(info_dict, url)).read()
348 return _key_cache[url]
349
350 def decrypt_fragment(fragment, frag_content):
351 if frag_content is None:
352 return
353 decrypt_info = fragment.get('decrypt_info')
354 if not decrypt_info or decrypt_info['METHOD'] != 'AES-128':
355 return frag_content
356 iv = decrypt_info.get('IV') or struct.pack('>8xq', fragment['media_sequence'])
357 decrypt_info['KEY'] = (decrypt_info.get('KEY')
358 or _get_key(traverse_obj(info_dict, ('hls_aes', 'uri')) or decrypt_info['URI']))
359 # Don't decrypt the content in tests since the data is explicitly truncated and it's not to a valid block
360 # size (see https://github.com/ytdl-org/youtube-dl/pull/27660). Tests only care that the correct data downloaded,
361 # not what it decrypts to.
362 if self.params.get('test', False):
363 return frag_content
364 return unpad_pkcs7(aes_cbc_decrypt_bytes(frag_content, decrypt_info['KEY'], iv))
365
366 return decrypt_fragment
367
    def download_and_append_fragments_multiple(self, *args, **kwargs):
        """
        @params (ctx1, fragments1, info_dict1), (ctx2, fragments2, info_dict2), ...
        all args must be either tuple or list

        Runs one download_and_append_fragments job per (ctx, fragments,
        info_dict) triple on its own thread pool; a shared interrupt_trigger
        flag lets Ctrl+C stop all of them cooperatively.
        """
        interrupt_trigger = [True]
        max_progress = len(args)
        if max_progress == 1:
            # Single job: no threading needed
            return self.download_and_append_fragments(*args[0], **kwargs)
        max_workers = self.params.get('concurrent_fragment_downloads', 1)
        if max_progress > 1:
            self._prepare_multiline_status(max_progress)
        is_live = any(traverse_obj(args, (..., 2, 'is_live')))

        def thread_func(idx, ctx, fragments, info_dict, tpe):
            ctx['max_progress'] = max_progress
            ctx['progress_idx'] = idx
            return self.download_and_append_fragments(
                ctx, fragments, info_dict, **kwargs, tpe=tpe, interrupt_trigger=interrupt_trigger)

        class FTPE(concurrent.futures.ThreadPoolExecutor):
            # has to stop this or it's going to wait on the worker thread itself
            def __exit__(self, exc_type, exc_val, exc_tb):
                pass

        if compat_os_name == 'nt':
            # On Windows, poll with a short timeout so a KeyboardInterrupt
            # can be raised promptly instead of blocking in future.result()
            def future_result(future):
                while True:
                    try:
                        return future.result(0.1)
                    except KeyboardInterrupt:
                        raise
                    except concurrent.futures.TimeoutError:
                        continue
        else:
            def future_result(future):
                return future.result()

        def interrupt_trigger_iter(fg):
            # Stop yielding fragments as soon as an interrupt is requested
            for f in fg:
                if not interrupt_trigger[0]:
                    break
                yield f

        spins = []
        for idx, (ctx, fragments, info_dict) in enumerate(args):
            # Each job gets its own pool; workers are split evenly across jobs
            tpe = FTPE(math.ceil(max_workers / max_progress))
            job = tpe.submit(thread_func, idx, ctx, interrupt_trigger_iter(fragments), info_dict, tpe)
            spins.append((tpe, job))

        result = True
        for tpe, job in spins:
            try:
                result = result and future_result(job)
            except KeyboardInterrupt:
                # Signal the remaining jobs to wind down cooperatively
                interrupt_trigger[0] = False
            finally:
                tpe.shutdown(wait=True)
        if not interrupt_trigger[0] and not is_live:
            raise KeyboardInterrupt
        # we expect the user wants to stop and DO WANT the preceding postprocessors to run;
        # so returning a intermediate result here instead of KeyboardInterrupt on live
        return result
431
    def download_and_append_fragments(
            self, ctx, fragments, info_dict, *, is_fatal=(lambda idx: False),
            pack_func=(lambda content, idx: content), finish_func=None,
            tpe=None, interrupt_trigger=(True, )):
        """Download every fragment (optionally with a thread pool), decrypt,
        pack and append each one to the destination stream in order.

        is_fatal(idx) decides whether a missing fragment aborts the download;
        pack_func transforms fragment bytes before appending; finish_func, if
        given, produces trailing bytes written after the last fragment.
        Returns the result of _finish_frag_download, or False on failure.
        """
        if not self.params.get('skip_unavailable_fragments', True):
            is_fatal = lambda _: True

        def download_fragment(fragment, ctx):
            if not interrupt_trigger[0]:
                return

            frag_index = ctx['fragment_index'] = fragment['frag_index']
            ctx['last_error'] = None
            headers = HTTPHeaderDict(info_dict.get('http_headers'))
            byte_range = fragment.get('byte_range')
            if byte_range:
                # HTTP Range header is inclusive, byte_range['end'] is exclusive
                headers['Range'] = 'bytes=%d-%d' % (byte_range['start'], byte_range['end'] - 1)

            # Never skip the first fragment
            fatal = is_fatal(fragment.get('index') or (frag_index - 1))

            def error_callback(err, count, retries):
                if fatal and count > retries:
                    # Out of retries on a fatal fragment: stop writing output
                    ctx['dest_stream'].close()
                self.report_retry(err, count, retries, frag_index, fatal)
                ctx['last_error'] = err

            for retry in RetryManager(self.params.get('fragment_retries'), error_callback):
                try:
                    ctx['fragment_count'] = fragment.get('fragment_count')
                    if not self._download_fragment(
                            ctx, fragment['url'], info_dict, headers, info_dict.get('request_data')):
                        return
                except (HTTPError, IncompleteRead) as err:
                    retry.error = err
                    continue
                except DownloadError:  # has own retry settings
                    if fatal:
                        raise

        def append_fragment(frag_content, frag_index, ctx):
            # Returns False only when a missing fragment is fatal
            if frag_content:
                self._append_fragment(ctx, pack_func(frag_content, frag_index))
            elif not is_fatal(frag_index - 1):
                self.report_skip_fragment(frag_index, 'fragment not found')
            else:
                ctx['dest_stream'].close()
                self.report_error(f'fragment {frag_index} not found, unable to continue')
                return False
            return True

        decrypt_fragment = self.decrypter(info_dict)

        # Workers are shared across concurrent progress lines (max_progress)
        max_workers = math.ceil(
            self.params.get('concurrent_fragment_downloads', 1) / ctx.get('max_progress', 1))
        if max_workers > 1:
            def _download_fragment(fragment):
                # Each worker gets its own ctx copy; only the resulting
                # filename/index are merged back on the main thread
                ctx_copy = ctx.copy()
                download_fragment(fragment, ctx_copy)
                return fragment, fragment['frag_index'], ctx_copy.get('fragment_filename_sanitized')

            with tpe or concurrent.futures.ThreadPoolExecutor(max_workers) as pool:
                try:
                    # pool.map preserves fragment order, so appends stay ordered
                    for fragment, frag_index, frag_filename in pool.map(_download_fragment, fragments):
                        ctx.update({
                            'fragment_filename_sanitized': frag_filename,
                            'fragment_index': frag_index,
                        })
                        if not append_fragment(decrypt_fragment(fragment, self._read_fragment(ctx)), frag_index, ctx):
                            return False
                except KeyboardInterrupt:
                    self._finish_multiline_status()
                    self.report_error(
                        'Interrupted by user. Waiting for all threads to shutdown...', is_error=False, tb=False)
                    pool.shutdown(wait=False)
                    raise
        else:
            # Sequential path: download and append one fragment at a time
            for fragment in fragments:
                if not interrupt_trigger[0]:
                    break
                try:
                    download_fragment(fragment, ctx)
                    result = append_fragment(
                        decrypt_fragment(fragment, self._read_fragment(ctx)), fragment['frag_index'], ctx)
                except KeyboardInterrupt:
                    # For live streams, keep what we have instead of aborting
                    if info_dict.get('is_live'):
                        break
                    raise
                if not result:
                    return False

        if finish_func is not None:
            ctx['dest_stream'].write(finish_func())
            ctx['dest_stream'].flush()
        return self._finish_frag_download(ctx, info_dict)