X-Git-Url: https://jfr.im/git/yt-dlp.git/blobdiff_plain/75dc8e673b481a82d0688aeec30f6c65d82bb359..c305a25c1b16bcf7a5ec499c3b786ed1e2c748da:/yt_dlp/YoutubeDL.py

diff --git a/yt_dlp/YoutubeDL.py b/yt_dlp/YoutubeDL.py
index 324f9e99c..291fc8d00 100644
--- a/yt_dlp/YoutubeDL.py
+++ b/yt_dlp/YoutubeDL.py
@@ -1,7 +1,7 @@
 import collections
 import contextlib
 import copy
-import datetime
+import datetime as dt
 import errno
 import fileinput
 import http.cookiejar
@@ -34,15 +34,15 @@
 from .extractor.openload import PhantomJSwrapper
 from .minicurses import format_text
 from .networking import HEADRequest, Request, RequestDirector
-from .networking.common import _REQUEST_HANDLERS
+from .networking.common import _REQUEST_HANDLERS, _RH_PREFERENCES
 from .networking.exceptions import (
     HTTPError,
     NoSupportingHandlers,
     RequestError,
     SSLError,
-    _CompatHTTPError,
     network_exceptions,
 )
+from .networking.impersonate import ImpersonateRequestHandler
 from .plugins import directories as plugin_directories
 from .postprocessor import _PLUGIN_CLASSES as plugin_pps
 from .postprocessor import (
@@ -60,7 +60,13 @@
     get_postprocessor,
 )
 from .postprocessor.ffmpeg import resolve_mapping as resolve_recode_mapping
-from .update import REPOSITORY, current_git_head, detect_variant
+from .update import (
+    REPOSITORY,
+    _get_system_deprecation,
+    _make_label,
+    current_git_head,
+    detect_variant,
+)
 from .utils import (
     DEFAULT_OUTTMPL,
     IDENTITY,
@@ -94,6 +100,7 @@
     SameFileError,
     UnavailableVideoError,
     UserNotLive,
+    YoutubeDLError,
     age_restricted,
     args_to_str,
     bug_reports_message,
@@ -139,6 +146,7 @@
     subtitles_filename,
     supports_terminal_sequences,
     system_identifier,
+    filesize_from_tbr,
     timetuple_from_msec,
     to_high_limit_path,
     traverse_obj,
@@ -158,7 +166,7 @@
     clean_proxies,
     std_headers,
 )
-from .version import CHANNEL, RELEASE_GIT_HEAD, VARIANT, __version__
+from .version import CHANNEL, ORIGIN, RELEASE_GIT_HEAD, VARIANT, __version__

 if compat_os_name == 'nt':
     import ctypes
@@ -239,9 +247,9 @@ class YoutubeDL:
                            'selected' (check selected formats),
                            or None (check only if requested by extractor)
     paths:             Dictionary of output paths. The allowed keys are 'home'
-                       'temp' and the keys of OUTTMPL_TYPES (in utils.py)
+                       'temp' and the keys of OUTTMPL_TYPES (in utils/_utils.py)
     outtmpl:           Dictionary of templates for output names. Allowed keys
-                       are 'default' and the keys of OUTTMPL_TYPES (in utils.py).
+                       are 'default' and the keys of OUTTMPL_TYPES (in utils/_utils.py).
                        For compatibility with youtube-dl, a single string can also be used
     outtmpl_na_placeholder: Placeholder for unavailable meta fields.
     restrictfilenames: Do not allow "&" and spaces in file names
@@ -256,8 +264,6 @@ class YoutubeDL:
     overwrites:        Overwrite all video and metadata files if True,
                        overwrite only non-video files if None
                        and don't overwrite any file if False
-                       For compatibility with youtube-dl,
-                       "nooverwrites" may also be used instead
     playlist_items:    Specific indices of playlist to download.
     playlistrandom:    Download playlist items in random order.
     lazy_playlist:     Process playlist entries as they are received.
@@ -399,6 +405,8 @@ class YoutubeDL:
                        - "detect_or_warn": check whether we can
                          do anything about it, warn otherwise (default)
     source_address:    Client-side IP address to bind to.
+    impersonate:       Client to impersonate for requests.
+                       An ImpersonateTarget (from yt_dlp.networking.impersonate)
     sleep_interval_requests: Number of seconds to sleep between requests
                        during extraction
     sleep_interval:    Number of seconds to sleep before each download when
@@ -424,7 +432,7 @@ class YoutubeDL:
                        asked whether to download the video.
                        - Raise utils.DownloadCancelled(msg) to abort remaining
                          downloads when a video is rejected.
-                       match_filter_func in utils.py is one example for this.
+                       match_filter_func in utils/_utils.py is one example for this.
     color:             A Dictionary with output stream names as keys
                        and their respective color policy as values.
                        Can also just be a single color policy,
@@ -553,6 +561,7 @@ class YoutubeDL:
                        You can reduce network I/O by disabling it if you
                        don't care about HLS. (only for youtube)
     no_color:          Same as `color='no_color'`
+    no_overwrites:     Same as `overwrites=False`
     """

     _NUMERIC_FIELDS = {
@@ -571,11 +580,18 @@
         'url', 'manifest_url', 'manifest_stream_number', 'ext', 'format', 'format_id', 'format_note',
         'width', 'height', 'aspect_ratio', 'resolution', 'dynamic_range', 'tbr', 'abr', 'acodec', 'asr', 'audio_channels',
         'vbr', 'fps', 'vcodec', 'container', 'filesize', 'filesize_approx', 'rows', 'columns',
-        'player_url', 'protocol', 'fragment_base_url', 'fragments', 'is_from_start',
-        'preference', 'language', 'language_preference', 'quality', 'source_preference',
+        'player_url', 'protocol', 'fragment_base_url', 'fragments', 'is_from_start', 'is_dash_periods', 'request_data',
+        'preference', 'language', 'language_preference', 'quality', 'source_preference', 'cookies',
         'http_headers', 'stretched_ratio', 'no_resume', 'has_drm', 'extra_param_to_segment_url', 'hls_aes', 'downloader_options',
         'page_url', 'app', 'play_path', 'tc_url', 'flash_version', 'rtmp_live', 'rtmp_conn', 'rtmp_protocol', 'rtmp_real_time'
     }
+    _deprecated_multivalue_fields = {
+        'album_artist': 'album_artists',
+        'artist': 'artists',
+        'composer': 'composers',
+        'creator': 'creators',
+        'genre': 'genres',
+    }
     _format_selection_exts = {
         'audio': set(MEDIA_EXTENSIONS.common_audio),
         'video': set(MEDIA_EXTENSIONS.common_video + ('3gp', )),
@@ -604,6 +620,7 @@ def __init__(self, params=None, auto_init=True):
         self._playlist_level = 0
         self._playlist_urls = set()
         self.cache = Cache(self)
+        self.__header_cookies = []

         stdout = sys.stderr if self.params.get('logtostderr') else sys.stdout
         self._out_files = Namespace(
@@ -621,17 +638,21 @@ def __init__(self, params=None, auto_init=True):
         if self.params.get('no_color'):
             if self.params.get('color') is not None:
-                self.report_warning('Overwriting params from "color" with "no_color"')
+                self.params.setdefault('_warnings', []).append(
+                    'Overwriting params from "color" with "no_color"')
             self.params['color'] = 'no_color'

-        term_allow_color = os.environ.get('TERM', '').lower() != 'dumb'
+        term_allow_color = os.getenv('TERM', '').lower() != 'dumb'
+        no_color = bool(os.getenv('NO_COLOR'))

         def process_color_policy(stream):
             stream_name = {sys.stdout: 'stdout', sys.stderr: 'stderr'}[stream]
             policy = traverse_obj(self.params, ('color', (stream_name, None), {str}), get_all=False)
             if policy in ('auto', None):
-                return term_allow_color and supports_terminal_sequences(stream)
-            assert policy in ('always', 'never', 'no_color')
+                if term_allow_color and supports_terminal_sequences(stream):
+                    return 'no_color' if no_color else True
+                return False
+            assert policy in ('always', 'never', 'no_color'), policy
             return {'always': True, 'never': False}.get(policy, policy)

         self._allow_colors = Namespace(**{
@@ -639,17 +660,9 @@ def process_color_policy(stream):
             for name, stream in self._out_files.items_ if name != 'console'
         })

-        # The code is left like this to be reused for future deprecations
-        MIN_SUPPORTED, MIN_RECOMMENDED = (3, 7), (3, 7)
-        current_version = sys.version_info[:2]
-        if current_version < MIN_RECOMMENDED:
-            msg = ('Support for Python version %d.%d has been deprecated. '
-                   'See https://github.com/yt-dlp/yt-dlp/issues/3764 for more details.'
-                   '\n                    You will no longer receive updates on this version')
-            if current_version < MIN_SUPPORTED:
-                msg = 'Python version %d.%d is no longer supported'
-            self.deprecated_feature(
-                f'{msg}! Please update to Python %d.%d or above' % (*current_version, *MIN_RECOMMENDED))
+        system_deprecation = _get_system_deprecation()
+        if system_deprecation:
+            self.deprecated_feature(system_deprecation.replace('\n', '\n                    '))

         if self.params.get('allow_unplayable_formats'):
             self.report_warning(
@@ -680,12 +693,9 @@ def process_color_policy(stream):
         self.params['compat_opts'] = set(self.params.get('compat_opts', ()))
         self.params['http_headers'] = HTTPHeaderDict(std_headers, self.params.get('http_headers'))
-        self.__header_cookies = []
         self._load_cookies(self.params['http_headers'].get('Cookie'))  # compat
         self.params['http_headers'].pop('Cookie', None)

-        self._request_director = self.build_request_director(
-            sorted(_REQUEST_HANDLERS.values(), key=lambda rh: rh.RH_NAME.lower()))

         if auto_init and auto_init != 'no_verbose_header':
             self.print_debug_header()
@@ -708,6 +718,13 @@ def check_deprecated(param, option, suggestion):
         for msg in self.params.get('_deprecation_warnings', []):
             self.deprecated_feature(msg)

+        if impersonate_target := self.params.get('impersonate'):
+            if not self._impersonate_target_available(impersonate_target):
+                raise YoutubeDLError(
+                    f'Impersonate target "{impersonate_target}" is not available. '
+                    f'Use --list-impersonate-targets to see available targets. '
+                    f'You may be missing dependencies required to support this target.')
+
         if 'list-formats' in self.params['compat_opts']:
             self.params['listformats_table'] = False

@@ -949,7 +966,7 @@ def __enter__(self):

     def save_cookies(self):
         if self.params.get('cookiefile') is not None:
-            self.cookiejar.save(ignore_discard=True, ignore_expires=True)
+            self.cookiejar.save()

     def __exit__(self, *args):
         self.restore_console_title()
@@ -957,7 +974,9 @@ def __exit__(self, *args):

     def close(self):
         self.save_cookies()
-        self._request_director.close()
+        if '_request_director' in self.__dict__:
+            self._request_director.close()
+            del self._request_director

     def trouble(self, message=None, tb=None, is_error=True):
         """Determine action to take when a download problem appears.
@@ -1185,6 +1204,7 @@ def prepare_outtmpl(self, outtmpl, info_dict, sanitize=False):
         MATH_FUNCTIONS = {
             '+': float.__add__,
             '-': float.__sub__,
+            '*': float.__mul__,
         }
         # Field is of the form key1.key2...
         # where keys (except first) can be string, int, slice or "{field, ...}"
@@ -1206,6 +1226,15 @@ def prepare_outtmpl(self, outtmpl, info_dict, sanitize=False):
             (?:\|(?P<default>.*?))?
             )$''')

+        def _from_user_input(field):
+            if field == ':':
+                return ...
+            elif ':' in field:
+                return slice(*map(int_or_none, field.split(':')))
+            elif int_or_none(field) is not None:
+                return int(field)
+            return field
+
         def _traverse_infodict(fields):
             fields = [f for x in re.split(r'\.({.+?})\.?', fields)
                       for f in ([x] if x.startswith('{') else x.split('.'))]
@@ -1215,11 +1244,12 @@ def _traverse_infodict(fields):
             for i, f in enumerate(fields):
                 if not f.startswith('{'):
+                    fields[i] = _from_user_input(f)
                     continue
                 assert f.endswith('}'), f'No closing brace for {f} in {fields}'
-                fields[i] = {k: k.split('.') for k in f[1:-1].split(',')}
+                fields[i] = {k: list(map(_from_user_input, k.split('.'))) for k in f[1:-1].split(',')}

-            return traverse_obj(info_dict, fields, is_user_input=True, traverse_string=True)
+            return traverse_obj(info_dict, fields, traverse_string=True)

         def get_value(mdict):
             # Object traversal
@@ -1300,16 +1330,16 @@ def create_key(outer_mobj):
                 else:
                     break

-            fmt = outer_mobj.group('format')
-            if fmt == 's' and value is not None and last_field in field_size_compat_map.keys():
-                fmt = f'0{field_size_compat_map[last_field]:d}d'
-
             if None not in (value, replacement):
                 try:
                     value = replacement_formatter.format(replacement, value)
                 except ValueError:
                     value, default = None, na

+            fmt = outer_mobj.group('format')
+            if fmt == 's' and last_field in field_size_compat_map.keys() and isinstance(value, int):
+                fmt = f'0{field_size_compat_map[last_field]:d}d'
+
             flags = outer_mobj.group('conversion') or ''
             str_fmt = f'{fmt[:-1]}s'
             if value is None:
@@ -2209,7 +2239,7 @@ def _parse_format_selection(tokens, inside_merge=False, inside_choice=False, ins
         selectors = []
         current_selector = None
         for type, string_, start, _, _ in tokens:
-            # ENCODING is only defined in python 3.x
+            # ENCODING is only defined in Python 3.x
             if type == getattr(tokenize, 'ENCODING', None):
                 continue
             elif type in [tokenize.NAME, tokenize.NUMBER]:
@@ -2338,16 +2368,16 @@ def _merge(formats_pair):
             return new_dict

         def _check_formats(formats):
-            if (self.params.get('check_formats') is not None
+            if self.params.get('check_formats') == 'selected':
+                yield from self._check_formats(formats)
+                return
+            elif (self.params.get('check_formats') is not None
                     or self.params.get('allow_unplayable_formats')):
                 yield from formats
                 return
-            elif self.params.get('check_formats') == 'selected':
-                yield from self._check_formats(formats)
-                return

             for f in formats:
-                if f.get('has_drm'):
+                if f.get('has_drm') or f.get('__needs_testing'):
                     yield from self._check_formats([f])
                 else:
                     yield f
@@ -2441,7 +2471,7 @@ def selector_function(ctx):
                         # for extractors with incomplete formats (audio only (soundcloud)
                         # or video only (imgur)) best/worst will fallback to
                        # best/worst {video,audio}-only format
-                        matches = formats
+                        matches = list(filter(lambda f: f.get('vcodec') != 'none' or f.get('acodec') != 'none', formats))
                     elif seperate_fallback and not ctx['has_merged_format']:
                         # for compatibility with youtube-dl when there is no pre-merged format
                         matches = list(filter(seperate_fallback, formats))
@@ -2460,9 +2490,16 @@ def final_selector(ctx):
                 return selector_function(ctx_copy)
             return final_selector

-        stream = io.BytesIO(format_spec.encode())
+        # HACK: Python 3.12 changed the underlying parser, rendering '7_a' invalid
+        #  Prefix numbers with random letters to avoid it being classified as a number
+        #  See: https://github.com/yt-dlp/yt-dlp/pulls/8797
+        # TODO: Implement parser not reliant on tokenize.tokenize
+        prefix = ''.join(random.choices(string.ascii_letters, k=32))
+        stream = io.BytesIO(re.sub(r'\d[_\d]*', rf'{prefix}\g<0>', format_spec).encode())
         try:
-            tokens = list(_remove_unused_ops(tokenize.tokenize(stream.readline)))
+            tokens = list(_remove_unused_ops(
+                token._replace(string=token.string.replace(prefix, ''))
+                for token in tokenize.tokenize(stream.readline)))
         except tokenize.TokenError:
             raise syntax_error('Missing closing/opening brackets or parenthesis', (0, len(format_spec)))

@@ -2592,9 +2629,12 @@ def _fill_common_fields(self, info_dict, final=True):
             # Working around out-of-range timestamp values (e.g. negative ones on Windows,
             # see http://bugs.python.org/issue1646728)
             with contextlib.suppress(ValueError, OverflowError, OSError):
-                upload_date = datetime.datetime.utcfromtimestamp(info_dict[ts_key])
+                upload_date = dt.datetime.fromtimestamp(info_dict[ts_key], dt.timezone.utc)
                 info_dict[date_key] = upload_date.strftime('%Y%m%d')

+        if not info_dict.get('release_year'):
+            info_dict['release_year'] = traverse_obj(info_dict, ('release_date', {lambda x: int(x[:4])}))
+
         live_keys = ('is_live', 'was_live')
         live_status = info_dict.get('live_status')
         if live_status is None:
@@ -2620,6 +2660,15 @@ def _fill_common_fields(self, info_dict, final=True):
             if final and info_dict.get('%s_number' % field) is not None and not info_dict.get(field):
                 info_dict[field] = '%s %d' % (field.capitalize(), info_dict['%s_number' % field])

+        for old_key, new_key in self._deprecated_multivalue_fields.items():
+            if new_key in info_dict and old_key in info_dict:
+                if '_version' not in info_dict:  # HACK: Do not warn when using --load-info-json
+                    self.deprecation_warning(f'Do not return {old_key!r} when {new_key!r} is present')
+            elif old_value := info_dict.get(old_key):
+                info_dict[new_key] = old_value.split(', ')
+            elif new_value := info_dict.get(new_key):
+                info_dict[old_key] = ', '.join(v.replace(',', '\N{FULLWIDTH COMMA}') for v in new_value)
+
     def _raise_pending_errors(self, info):
         err = info.pop('__pending_error', None)
         if err:
@@ -2734,7 +2783,7 @@ def sanitize_numeric_fields(info):

         get_from_start = not info_dict.get('is_live') or bool(self.params.get('live_from_start'))
         if not get_from_start:
-            info_dict['title'] += ' ' + datetime.datetime.now().strftime('%Y-%m-%d %H:%M')
+            info_dict['title'] += ' ' + dt.datetime.now().strftime('%Y-%m-%d %H:%M')
         if info_dict.get('is_live') and formats:
             formats = [f for f in formats if bool(f.get('is_from_start')) == get_from_start]
             if get_from_start and not formats:
@@ -2765,6 +2814,9 @@ def is_wellformed(f):
             format['url'] = sanitize_url(format['url'])
             if format.get('ext') is None:
                 format['ext'] = determine_ext(format['url']).lower()
+            if format['ext'] in ('aac', 'opus', 'mp3', 'flac', 'vorbis'):
+                if format.get('acodec') is None:
+                    format['acodec'] = format['ext']
             if format.get('protocol') is None:
                 format['protocol'] = determine_protocol(format)
             if format.get('resolution') is None:
@@ -2773,10 +2825,10 @@ def is_wellformed(f):
                 format['dynamic_range'] = 'SDR'
             if format.get('aspect_ratio') is None:
                 format['aspect_ratio'] = try_call(lambda: round(format['width'] / format['height'], 2))
-            if (not format.get('manifest_url')  # For fragmented formats, "tbr" is often max bitrate and not average
-                    and info_dict.get('duration') and format.get('tbr')
+            # For fragmented formats, "tbr" is often max bitrate and not average
+            if (('manifest-filesize-approx' in self.params['compat_opts'] or not format.get('manifest_url'))
                     and not format.get('filesize') and not format.get('filesize_approx')):
-                format['filesize_approx'] = int(info_dict['duration'] * format['tbr'] * (1024 / 8))
+                format['filesize_approx'] = filesize_from_tbr(format.get('tbr'), info_dict.get('duration'))
             format['http_headers'] = self._calc_headers(collections.ChainMap(format, info_dict), load_cookies=True)

         # Safeguard against old/insecure infojson when using --load-info-json
@@ -3290,7 +3342,7 @@ def existing_video_file(*filepaths):
         fd, success = None, True
         if info_dict.get('protocol') or info_dict.get('url'):
             fd = get_suitable_downloader(info_dict, self.params, to_stdout=temp_filename == '-')
-            if fd is not FFmpegFD and 'no-direct-merge' not in self.params['compat_opts'] and (
+            if fd != FFmpegFD and 'no-direct-merge' not in self.params['compat_opts'] and (
                     info_dict.get('section_start') or info_dict.get('section_end')):
                 msg = ('This format cannot be partially downloaded'
                        if FFmpegFD.available() else 'You have requested downloading the video partially, but ffmpeg is not installed')
@@ -3454,14 +3506,16 @@ def ffmpeg_fixup(cndn, msg, cls):
             ) for pp in self._pps['post_process'])

             if not postprocessed_by_ffmpeg:
-                ffmpeg_fixup(ext == 'm4a' and info_dict.get('container') == 'm4a_dash',
+                ffmpeg_fixup(fd != FFmpegFD and ext == 'm4a'
+                             and info_dict.get('container') == 'm4a_dash',
                              'writing DASH m4a. Only some players support this container',
                              FFmpegFixupM4aPP)
                 ffmpeg_fixup(downloader == 'hlsnative' and not self.params.get('hls_use_mpegts')
                              or info_dict.get('is_live') and self.params.get('hls_use_mpegts') is None,
                              'Possible MPEG-TS in MP4 container or malformed AAC timestamps',
                              FFmpegFixupM3u8PP)
-                ffmpeg_fixup(info_dict.get('is_live') and downloader == 'dashsegments',
+                ffmpeg_fixup(downloader == 'dashsegments'
+                             and (info_dict.get('is_live') or info_dict.get('is_dash_periods')),
                              'Possible duplicate MOOV atoms', FFmpegFixupDuplicateMoovPP)
                 ffmpeg_fixup(downloader == 'web_socket_fragment',
                              'Malformed timestamps detected', FFmpegFixupTimestampPP)
@@ -3538,6 +3592,8 @@ def download_with_info_file(self, info_filename):
                     raise
                 self.report_warning(f'The info failed to download: {e}; trying with URL {webpage_url}')
                 self.download([webpage_url])
+            except ExtractorError as e:
+                self.report_error(e)
         return self._download_retcode

     @staticmethod
@@ -3551,14 +3607,14 @@ def sanitize_info(info_dict, remove_private_keys=False):
             'version': __version__,
             'current_git_head': current_git_head(),
             'release_git_head': RELEASE_GIT_HEAD,
-            'repository': REPOSITORY,
+            'repository': ORIGIN,
         })

         if remove_private_keys:
             reject = lambda k, v: v is None or k.startswith('__') or k in {
                 'requested_downloads', 'requested_formats', 'requested_subtitles', 'requested_entries',
                 'entries', 'filepath', '_filename', 'filename', 'infojson_filename', 'original_url',
-                'playlist_autonumber', '_format_sort_fields',
+                'playlist_autonumber',
             }
         else:
             reject = lambda k, v: False
@@ -3822,8 +3878,8 @@ def simplified_codec(f, field):
                 delim, (
                     format_field(f, 'filesize', ' \t%s', func=format_bytes)
                     or format_field(f, 'filesize_approx', '≈\t%s', func=format_bytes)
-                    or format_field(try_call(lambda: format_bytes(int(info_dict['duration'] * f['tbr'] * (1024 / 8)))),
-                                    None, self._format_out('~\t%s', self.Styles.SUPPRESS))),
+                    or format_field(filesize_from_tbr(f.get('tbr'), info_dict.get('duration')), None,
+                                    self._format_out('~\t%s', self.Styles.SUPPRESS), func=format_bytes)),
                 format_field(f, 'tbr', '\t%dk', func=round),
                 shorten_protocol_name(f.get('protocol', '')),
                 delim,
@@ -3934,8 +3990,8 @@ def get_encoding(stream):
             source += '*'
         klass = type(self)
         write_debug(join_nonempty(
-            f'{"yt-dlp" if REPOSITORY == "yt-dlp/yt-dlp" else REPOSITORY} version',
-            f'{CHANNEL}@{__version__}',
+            f'{REPOSITORY.rpartition("/")[2]} version',
+            _make_label(ORIGIN, CHANNEL.partition('@')[2] or __version__, __version__),
             f'[{RELEASE_GIT_HEAD[:9]}]' if RELEASE_GIT_HEAD else '',
             '' if source == 'unknown' else f'({source})',
             '' if _IN_CLI else 'API' if klass == YoutubeDL else f'API:{self.__module__}.{klass.__qualname__}',
@@ -3976,7 +4032,7 @@ def get_encoding(stream):
         })) or 'none'))

         write_debug(f'Proxy map: {self.proxies}')
-        # write_debug(f'Request Handlers: {", ".join(rh.RH_NAME for rh in self._request_director.handlers)}')
+        write_debug(f'Request Handlers: {", ".join(rh.RH_NAME for rh in self._request_director.handlers.values())}')
         for plugin_type, plugins in {'Extractor': plugin_ies, 'Post-Processor': plugin_pps}.items():
             display_list = ['%s%s' % (
                 klass.__name__, '' if klass.__name__ == name else f' as {name}')
@@ -4031,10 +4087,26 @@ def _opener(self):
         """
         Get a urllib OpenerDirector from the Urllib handler (deprecated).
         """
-        self.deprecation_warning('YoutubeDL._opener() is deprecated, use YoutubeDL.urlopen()')
+        self.deprecation_warning('YoutubeDL._opener is deprecated, use YoutubeDL.urlopen()')
         handler = self._request_director.handlers['Urllib']
         return handler._get_instance(cookiejar=self.cookiejar, proxies=self.proxies)

+    def _get_available_impersonate_targets(self):
+        # todo(future): make available as public API
+        return [
+            (target, rh.RH_NAME)
+            for rh in self._request_director.handlers.values()
+            if isinstance(rh, ImpersonateRequestHandler)
+            for target in rh.supported_targets
+        ]
+
+    def _impersonate_target_available(self, target):
+        # todo(future): make available as public API
+        return any(
+            rh.is_supported_target(target)
+            for rh in self._request_director.handlers.values()
+            if isinstance(rh, ImpersonateRequestHandler))
+
     def urlopen(self, req):
         """ Start an HTTP download """
         if isinstance(req, str):
@@ -4059,12 +4131,36 @@ def urlopen(self, req):
             return self._request_director.send(req)
         except NoSupportingHandlers as e:
             for ue in e.unsupported_errors:
+                # FIXME: This depends on the order of errors.
                 if not (ue.handler and ue.msg):
                     continue
                 if ue.handler.RH_KEY == 'Urllib' and 'unsupported url scheme: "file"' in ue.msg.lower():
                     raise RequestError(
                         'file:// URLs are disabled by default in yt-dlp for security reasons. '
                         'Use --enable-file-urls to enable at your own risk.', cause=ue) from ue
+                if (
+                    'unsupported proxy type: "https"' in ue.msg.lower()
+                    and 'requests' not in self._request_director.handlers
+                    and 'curl_cffi' not in self._request_director.handlers
+                ):
+                    raise RequestError(
+                        'To use an HTTPS proxy for this request, one of the following dependencies needs to be installed: requests, curl_cffi')
+
+                elif (
+                    re.match(r'unsupported url scheme: "wss?"', ue.msg.lower())
+                    and 'websockets' not in self._request_director.handlers
+                ):
+                    raise RequestError(
+                        'This request requires WebSocket support. '
+                        'Ensure one of the following dependencies are installed: websockets',
+                        cause=ue) from ue
+
+                elif re.match(r'unsupported (?:extensions: impersonate|impersonate target)', ue.msg.lower()):
+                    raise RequestError(
+                        f'Impersonate target "{req.extensions["impersonate"]}" is not available.'
+                        f' See --list-impersonate-targets for available targets.'
+                        f' This request requires browser impersonation, however you may be missing dependencies'
+                        f' required to support this target.')
             raise
         except SSLError as e:
             if 'UNSAFE_LEGACY_RENEGOTIATION_DISABLED' in str(e):
@@ -4074,12 +4170,10 @@ def urlopen(self, req):
                     'SSLV3_ALERT_HANDSHAKE_FAILURE: The server may not support the current cipher list. '
                     'Try using --legacy-server-connect', cause=e) from e
             raise
-        except HTTPError as e:  # TODO: Remove in a future release
-            raise _CompatHTTPError(e) from e

-    def build_request_director(self, handlers):
+    def build_request_director(self, handlers, preferences=None):
         logger = _YDLLogger(self)
-        headers = self.params.get('http_headers').copy()
+        headers = self.params['http_headers'].copy()
         proxies = self.proxies.copy()
         clean_headers(headers)
         clean_proxies(proxies, headers)
@@ -4099,6 +4193,7 @@ def build_request_director(self, handlers):
                     'timeout': 'socket_timeout',
                     'legacy_ssl_support': 'legacyserverconnect',
                     'enable_file_urls': 'enable_file_urls',
+                    'impersonate': 'impersonate',
                     'client_cert': {
                         'client_certificate': 'client_certificate',
                         'client_certificate_key': 'client_certificate_key',
@@ -4106,8 +4201,15 @@ def build_request_director(self, handlers):
                     },
                 }),
             ))
+        director.preferences.update(preferences or [])
+        if 'prefer-legacy-http-handler' in self.params['compat_opts']:
+            director.preferences.add(lambda rh, _: 500 if rh.RH_KEY == 'Urllib' else 0)
         return director

+    @functools.cached_property
+    def _request_director(self):
+        return self.build_request_director(_REQUEST_HANDLERS.values(), _RH_PREFERENCES)
+
     def encode(self, s):
         if isinstance(s, bytes):
             return s  # Already encoded
@@ -4228,7 +4330,7 @@ def _write_subtitles(self, info_dict, filename):
         return ret

     def _write_thumbnails(self, label, info_dict, filename, thumb_filename_base=None):
-        ''' Write thumbnails to file and return list of (thumb_filename, final_thumb_filename) '''
+        ''' Write thumbnails to file and return list of (thumb_filename, final_thumb_filename); or None if error '''
         write_all = self.params.get('write_all_thumbnails', False)
         thumbnails, ret = [], []
         if write_all or self.params.get('writethumbnail', False):
@@ -4244,6 +4346,9 @@ def _write_thumbnails(self, label, info_dict, filename, thumb_filename_base=None
                 self.write_debug(f'Skipping writing {label} thumbnail')
                 return ret

+        if thumbnails and not self._ensure_dir_exists(filename):
+            return None
+
         for idx, t in list(enumerate(thumbnails))[::-1]:
             thumb_ext = (f'{t["id"]}.' if multiple else '') + determine_ext(t['url'], 'jpg')
             thumb_display_id = f'{label} thumbnail {t["id"]}'
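
Note: the hunks above add an `impersonate` option that is validated in `__init__` and passed through to impersonation-capable request handlers. A minimal usage sketch of the Python API, assuming an impersonation-capable handler (such as the optional curl_cffi dependency) is installed; the `ImpersonateTarget('chrome')` argument and the URL are illustrative, not taken from this diff:

from yt_dlp import YoutubeDL
from yt_dlp.networking.impersonate import ImpersonateTarget

# 'impersonate' is the new params key documented in the diff; if no installed
# request handler supports the target, __init__ raises YoutubeDLError.
with YoutubeDL({'impersonate': ImpersonateTarget('chrome')}) as ydl:
    ydl.download(['https://example.com/some-video'])  # placeholder URL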
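
Note: `filesize_approx` is now computed by the `filesize_from_tbr` helper imported from `.utils` instead of an inline expression. A rough sketch of the arithmetic, assuming `tbr` is a total bitrate in kbit/s and `duration` is in seconds; the exact rounding and whether 1000 or 1024 bits per kilobit is used live in the helper and are not visible in this diff (the replaced inline code used 1024 / 8):

def approx_filesize(tbr, duration):
    # Hypothetical re-implementation for illustration only.
    # Returns an approximate size in bytes, or None when either input is missing.
    if tbr is None or duration is None:
        return None
    return int(duration * tbr * (1000 / 8))  # kbit/s * seconds -> bytes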
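
Note: the new `_deprecated_multivalue_fields` mapping and the `_fill_common_fields` hunk keep the legacy single-string fields ('artist', 'genre', ...) and their list-valued replacements ('artists', 'genres', ...) in sync. A small illustration of the round-trip with made-up metadata:

info = {'artists': ['Artist A', 'Artist B, Jr.']}
# plural -> singular: joined with ', ', embedded commas swapped for a fullwidth comma
info['artist'] = ', '.join(v.replace(',', '\N{FULLWIDTH COMMA}') for v in info['artists'])
assert info['artist'] == 'Artist A, Artist B， Jr.'

legacy = {'genre': 'Pop, Rock'}
# singular -> plural (only when the new key is absent): split on ', '
legacy['genres'] = legacy['genre'].split(', ')
assert legacy['genres'] == ['Pop', 'Rock']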
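
Note: the `prepare_outtmpl` hunks extend template maths with '*' and let dotted field paths contain integers, slices and ':' via the new `_from_user_input`. Hedged examples of templates this should enable; the field names and values are illustrative and depend on what the extractor actually returns:

ydl_opts = {
    # index into a list-valued field: height of the last entry of 'formats'
    'outtmpl': '%(title)s.%(formats.-1.height)sp.%(ext)s',
}
# '*' now joins '+' and '-' as a template math operator, e.g. (assumed, not shown in the diff):
#   '%(duration*1000)d'       -> duration in milliseconds
#   '%(playlist_index*10)03d' -> spaced-out numbering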