import concurrent.futures
import contextlib
-import http.client
import json
import math
import os
import struct
import time
-import urllib.error
from .common import FileDownloader
from .http import HttpFD
from ..aes import aes_cbc_decrypt_bytes, unpad_pkcs7
from ..compat import compat_os_name
-from ..utils import (
- DownloadError,
- RetryManager,
- encodeFilename,
- sanitized_Request,
- traverse_obj,
-)
+from ..networking import Request
+from ..networking.exceptions import HTTPError, IncompleteRead
+from ..utils import DownloadError, RetryManager, encodeFilename, traverse_obj
+from ..utils.networking import HTTPHeaderDict
+from ..utils.progress import ProgressCalculator
class HttpQuietDownloader(HttpFD):
def _prepare_url(self, info_dict, url):
headers = info_dict.get('http_headers')
- return sanitized_Request(url, None, headers) if headers else url
+ return Request(url, None, headers) if headers else url
def _prepare_and_start_frag_download(self, ctx, info_dict):
self._prepare_frag_download(ctx)
resume_len = ctx['complete_frags_downloaded_bytes']
total_frags = ctx['total_frags']
ctx_id = ctx.get('ctx_id')
- # This dict stores the download progress, it's updated by the progress
- # hook
+ # Stores the download progress; it is updated by the frag progress hook
state = {
'status': 'downloading',
'downloaded_bytes': resume_len,
'tmpfilename': ctx['tmpfilename'],
}
- start = time.time()
- ctx.update({
- 'started': start,
- 'fragment_started': start,
- # Amount of fragment's bytes downloaded by the time of the previous
- # frag progress hook invocation
- 'prev_frag_downloaded_bytes': 0,
- })
+ ctx['started'] = time.time()
+ progress = ProgressCalculator(resume_len)
def frag_progress_hook(s):
if s['status'] not in ('downloading', 'finished'):
state['max_progress'] = ctx.get('max_progress')
state['progress_idx'] = ctx.get('progress_idx')
- time_now = time.time()
- state['elapsed'] = time_now - start
+ state['elapsed'] = progress.elapsed
frag_total_bytes = s.get('total_bytes') or 0
s['fragment_info_dict'] = s.pop('info_dict', {})
+
+ # XXX: Fragment resume is not accounted for here
if not ctx['live']:
estimated_size = (
(ctx['complete_frags_downloaded_bytes'] + frag_total_bytes)
/ (state['fragment_index'] + 1) * total_frags)
- state['total_bytes_estimate'] = estimated_size
+ progress.total = estimated_size
+ progress.update(s.get('downloaded_bytes'))
+ state['total_bytes_estimate'] = progress.total
+ else:
+ progress.update(s.get('downloaded_bytes'))
if s['status'] == 'finished':
state['fragment_index'] += 1
ctx['fragment_index'] = state['fragment_index']
- state['downloaded_bytes'] += frag_total_bytes - ctx['prev_frag_downloaded_bytes']
- ctx['complete_frags_downloaded_bytes'] = state['downloaded_bytes']
- ctx['speed'] = state['speed'] = self.calc_speed(
- ctx['fragment_started'], time_now, frag_total_bytes)
- ctx['fragment_started'] = time.time()
- ctx['prev_frag_downloaded_bytes'] = 0
- else:
- frag_downloaded_bytes = s['downloaded_bytes']
- state['downloaded_bytes'] += frag_downloaded_bytes - ctx['prev_frag_downloaded_bytes']
- ctx['speed'] = state['speed'] = self.calc_speed(
- ctx['fragment_started'], time_now, frag_downloaded_bytes - ctx.get('frag_resume_len', 0))
- if not ctx['live']:
- state['eta'] = self.calc_eta(state['speed'], estimated_size - state['downloaded_bytes'])
- ctx['prev_frag_downloaded_bytes'] = frag_downloaded_bytes
+ progress.thread_reset()
+
+ state['downloaded_bytes'] = ctx['complete_frags_downloaded_bytes'] = progress.downloaded
+ state['speed'] = ctx['speed'] = progress.speed.smooth
+ state['eta'] = progress.eta.smooth
+
self._hook_progress(state, info_dict)
ctx['dl'].add_progress_hook(frag_progress_hook)
- return start
+ return ctx['started']
def _finish_frag_download(self, ctx, info_dict):
ctx['dest_stream'].close()
if self.__do_ytdl_file(ctx):
- ytdl_filename = encodeFilename(self.ytdl_filename(ctx['filename']))
- if os.path.isfile(ytdl_filename):
- self.try_remove(ytdl_filename)
+ self.try_remove(self.ytdl_filename(ctx['filename']))
elapsed = time.time() - ctx['started']
to_file = ctx['tmpfilename'] != '-'
frag_index = ctx['fragment_index'] = fragment['frag_index']
ctx['last_error'] = None
- headers = info_dict.get('http_headers', {}).copy()
+ headers = HTTPHeaderDict(info_dict.get('http_headers'))
byte_range = fragment.get('byte_range')
if byte_range:
headers['Range'] = 'bytes=%d-%d' % (byte_range['start'], byte_range['end'] - 1)
if not self._download_fragment(
ctx, fragment['url'], info_dict, headers, info_dict.get('request_data')):
return
- except (urllib.error.HTTPError, http.client.IncompleteRead) as err:
+ except (HTTPError, IncompleteRead) as err:
retry.error = err
continue
except DownloadError: # has own retry settings
download_fragment(fragment, ctx_copy)
return fragment, fragment['frag_index'], ctx_copy.get('fragment_filename_sanitized')
- self.report_warning('The download speed shown is only of one thread. This is a known issue')
with tpe or concurrent.futures.ThreadPoolExecutor(max_workers) as pool:
try:
for fragment, frag_index, frag_filename in pool.map(_download_fragment, fragments):