jfr.im git - yt-dlp.git/commitdiff
[cleanup] Misc cleanup and refactor (#2173)
author    pukkandan <redacted>
Sun, 17 Apr 2022 20:58:28 +0000 (02:28 +0530)
committer pukkandan <redacted>
Sun, 17 Apr 2022 20:58:28 +0000 (02:28 +0530)
31 files changed:
devscripts/make_readme.py
setup.cfg
supportedsites.md
test/test_compat.py
test/test_execution.py
test/test_utils.py
test/test_verbose_output.py
test/test_write_annotations.py.disabled
test/test_youtube_signature.py
yt_dlp/YoutubeDL.py
yt_dlp/__init__.py
yt_dlp/aes.py
yt_dlp/cache.py
yt_dlp/compat.py
yt_dlp/cookies.py
yt_dlp/downloader/common.py
yt_dlp/downloader/fragment.py
yt_dlp/downloader/websocket.py
yt_dlp/extractor/__init__.py
yt_dlp/extractor/cpac.py
yt_dlp/extractor/extractors.py
yt_dlp/extractor/openload.py
yt_dlp/extractor/rtve.py
yt_dlp/extractor/spotify.py
yt_dlp/extractor/youtube.py
yt_dlp/jsinterp.py
yt_dlp/options.py
yt_dlp/postprocessor/common.py
yt_dlp/postprocessor/metadataparser.py
yt_dlp/utils.py
yt_dlp/webvtt.py

index 1719ac8e43e84cb06983e9edc89dbdcdafc90512..1401c2e5a74310c6873aa67a05c41b8bf95f9d3e 100755 (executable)
--- a/devscripts/make_readme.py
+++ b/devscripts/make_readme.py
@@ -6,22 +6,25 @@
 import sys
 
 README_FILE = 'README.md'
-helptext = sys.stdin.read()
 
+OPTIONS_START = 'General Options:'
+OPTIONS_END = 'CONFIGURATION'
+EPILOG_START = 'See full documentation'
+
+
+helptext = sys.stdin.read()
 if isinstance(helptext, bytes):
     helptext = helptext.decode('utf-8')
 
-with open(README_FILE, encoding='utf-8') as f:
-    oldreadme = f.read()
+start, end = helptext.index(f'\n  {OPTIONS_START}'), helptext.index(f'\n{EPILOG_START}')
+options = re.sub(r'(?m)^  (\w.+)$', r'## \1', helptext[start + 1: end + 1])
 
-header = oldreadme[:oldreadme.index('## General Options:')]
-footer = oldreadme[oldreadme.index('# CONFIGURATION'):]
+with open(README_FILE, encoding='utf-8') as f:
+    readme = f.read()
 
-options = helptext[helptext.index('  General Options:'):]
-options = re.sub(r'(?m)^  (\w.+)$', r'## \1', options)
-options = options + '\n'
+header = readme[:readme.index(f'## {OPTIONS_START}')]
+footer = readme[readme.index(f'# {OPTIONS_END}'):]
 
 with open(README_FILE, 'w', encoding='utf-8') as f:
-    f.write(header)
-    f.write(options)
-    f.write(footer)
+    for part in (header, options, footer):
+        f.write(part)
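
The script now slices the options block straight out of `yt-dlp --help` (read from stdin) between the "General Options:" and "See full documentation" markers, and promotes each option-group heading to a Markdown heading. A minimal sketch of what the substitution does; the sample help text below is made up for illustration:

    import re

    helptext = (
        '  General Options:\n'
        '    -h, --help    Print this help text and exit\n'
        '  Network Options:\n'
        '    --proxy URL   Use the specified HTTP/HTTPS/SOCKS proxy\n'
    )
    # Only lines indented by exactly two spaces and starting with a word character
    # (the group headings) are rewritten; deeper-indented option lines are untouched.
    print(re.sub(r'(?m)^  (\w.+)$', r'## \1', helptext))
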
index 59372d93a5eb175a207ceb180fb8f9a0b8a753ac..5fe95226add67bab7bf6903184334bc2f2504805 100644 (file)
--- a/setup.cfg
+++ b/setup.cfg
@@ -2,5 +2,5 @@
 universal = True
 
 [flake8]
-exclude = yt_dlp/extractor/__init__.py,devscripts/buildserver.py,devscripts/lazy_load_template.py,devscripts/make_issue_template.py,setup.py,build,.git,venv,devscripts/create-github-release.py,devscripts/release.sh,devscripts/show-downloads-statistics.py
-ignore = E402,E501,E731,E741,W503
\ No newline at end of file
+exclude = devscripts/lazy_load_template.py,devscripts/make_issue_template.py,setup.py,build,.git,venv
+ignore = E402,E501,E731,E741,W503
index eac7842a3dfbe122c6cb4c837f22a497452700f2..746a93de624f1fb8ca2732ef487de4b8692f4aca 100644 (file)
--- a/supportedsites.md
+++ b/supportedsites.md
@@ -1147,8 +1147,8 @@ # Supported sites
  - **Sport5**
  - **SportBox**
  - **SportDeutschland**
- - **spotify**
- - **spotify:show**
+ - **spotify**: Spotify episodes
+ - **spotify:show**: Spotify shows
  - **Spreaker**
  - **SpreakerPage**
  - **SpreakerShow**
index 20dab9573ff808ffcf2de90e6e3940fe28ea24b8..29e7384f049631c032b044ba118a8f1a3c199abc 100644 (file)
--- a/test/test_compat.py
+++ b/test/test_compat.py
@@ -35,10 +35,12 @@ def test_compat_setenv(self):
 
     def test_compat_expanduser(self):
         old_home = os.environ.get('HOME')
-        test_str = r'C:\Documents and Settings\тест\Application Data'
-        compat_setenv('HOME', test_str)
-        self.assertEqual(compat_expanduser('~'), test_str)
-        compat_setenv('HOME', old_home or '')
+        test_str = R'C:\Documents and Settings\тест\Application Data'
+        try:
+            compat_setenv('HOME', test_str)
+            self.assertEqual(compat_expanduser('~'), test_str)
+        finally:
+            compat_setenv('HOME', old_home or '')
 
     def test_all_present(self):
         import yt_dlp.compat
index 6a3e9944bfb4f1ed75d1ab083adddb6b9dfbefd4..6efd432e971156f27eaa5bb7f9e1339531969207 100644 (file)
--- a/test/test_execution.py
+++ b/test/test_execution.py
@@ -1,4 +1,5 @@
 #!/usr/bin/env python3
+import contextlib
 import os
 import subprocess
 import sys
@@ -22,14 +23,14 @@ def test_import(self):
         subprocess.check_call([sys.executable, '-c', 'import yt_dlp'], cwd=rootDir)
 
     def test_module_exec(self):
-        subprocess.check_call([sys.executable, '-m', 'yt_dlp', '--version'], cwd=rootDir, stdout=_DEV_NULL)
+        subprocess.check_call([sys.executable, '-m', 'yt_dlp', '--ignore-config', '--version'], cwd=rootDir, stdout=_DEV_NULL)
 
     def test_main_exec(self):
-        subprocess.check_call([sys.executable, 'yt_dlp/__main__.py', '--version'], cwd=rootDir, stdout=_DEV_NULL)
+        subprocess.check_call([sys.executable, 'yt_dlp/__main__.py', '--ignore-config', '--version'], cwd=rootDir, stdout=_DEV_NULL)
 
     def test_cmdline_umlauts(self):
         p = subprocess.Popen(
-            [sys.executable, 'yt_dlp/__main__.py', encodeArgument('ä'), '--version'],
+            [sys.executable, 'yt_dlp/__main__.py', '--ignore-config', encodeArgument('ä'), '--version'],
             cwd=rootDir, stdout=_DEV_NULL, stderr=subprocess.PIPE)
         _, stderr = p.communicate()
         self.assertFalse(stderr)
@@ -39,10 +40,8 @@ def test_lazy_extractors(self):
             subprocess.check_call([sys.executable, 'devscripts/make_lazy_extractors.py', 'yt_dlp/extractor/lazy_extractors.py'], cwd=rootDir, stdout=_DEV_NULL)
             subprocess.check_call([sys.executable, 'test/test_all_urls.py'], cwd=rootDir, stdout=_DEV_NULL)
         finally:
-            try:
+            with contextlib.suppress(OSError):
                 os.remove('yt_dlp/extractor/lazy_extractors.py')
-            except OSError:
-                pass
 
 
 if __name__ == '__main__':
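
A recurring change in this commit is replacing try/except/pass blocks with contextlib.suppress (available since Python 3.4), which silently swallows only the listed exception types. The two forms below are equivalent; the file name is illustrative:

    import contextlib
    import os

    try:
        os.remove('lazy_extractors.tmp')  # illustrative path
    except OSError:
        pass

    with contextlib.suppress(OSError):
        os.remove('lazy_extractors.tmp')  # same effect, less nesting
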
index 7909dc61c060dc9228c3dde7555aad1a4979f61f..5e220087b51ad8ca32df16324f58db660a6f4ccd 100644 (file)
--- a/test/test_utils.py
+++ b/test/test_utils.py
@@ -1,5 +1,6 @@
 #!/usr/bin/env python3
 # Allow direct execution
+import contextlib
 import os
 import sys
 import unittest
@@ -267,11 +268,18 @@ def env(var):
 
         compat_setenv('yt_dlp_EXPATH_PATH', 'expanded')
         self.assertEqual(expand_path(env('yt_dlp_EXPATH_PATH')), 'expanded')
-        self.assertEqual(expand_path(env('HOME')), compat_getenv('HOME'))
-        self.assertEqual(expand_path('~'), compat_getenv('HOME'))
-        self.assertEqual(
-            expand_path('~/%s' % env('yt_dlp_EXPATH_PATH')),
-            '%s/expanded' % compat_getenv('HOME'))
+
+        old_home = os.environ.get('HOME')
+        test_str = R'C:\Documents and Settings\тест\Application Data'
+        try:
+            compat_setenv('HOME', test_str)
+            self.assertEqual(expand_path(env('HOME')), compat_getenv('HOME'))
+            self.assertEqual(expand_path('~'), compat_getenv('HOME'))
+            self.assertEqual(
+                expand_path('~/%s' % env('yt_dlp_EXPATH_PATH')),
+                '%s/expanded' % compat_getenv('HOME'))
+        finally:
+            compat_setenv('HOME', old_home or '')
 
     def test_prepend_extension(self):
         self.assertEqual(prepend_extension('abc.ext', 'temp'), 'abc.temp.ext')
@@ -1814,10 +1822,8 @@ def test_locked_file(self):
                         else:
                             self.assertFalse(testing_write, f'{test_mode} is not blocked by {lock_mode}')
         finally:
-            try:
+            with contextlib.suppress(OSError):
                 os.remove(FILE)
-            except Exception:
-                pass
 
 
 if __name__ == '__main__':
index 1213a97266f70354a775d386f5420da770ef1bdf..6579940740a23201cfe3bf522c66a5ca58ad4484 100644 (file)
--- a/test/test_verbose_output.py
+++ b/test/test_verbose_output.py
@@ -13,7 +13,8 @@ class TestVerboseOutput(unittest.TestCase):
     def test_private_info_arg(self):
         outp = subprocess.Popen(
             [
-                sys.executable, 'yt_dlp/__main__.py', '-v',
+                sys.executable, 'yt_dlp/__main__.py',
+                '-v', '--ignore-config',
                 '--username', 'johnsmith@gmail.com',
                 '--password', 'my_secret_password',
             ], cwd=rootDir, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
@@ -26,7 +27,8 @@ def test_private_info_arg(self):
     def test_private_info_shortarg(self):
         outp = subprocess.Popen(
             [
-                sys.executable, 'yt_dlp/__main__.py', '-v',
+                sys.executable, 'yt_dlp/__main__.py',
+                '-v', '--ignore-config',
                 '-u', 'johnsmith@gmail.com',
                 '-p', 'my_secret_password',
             ], cwd=rootDir, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
@@ -39,7 +41,8 @@ def test_private_info_shortarg(self):
     def test_private_info_eq(self):
         outp = subprocess.Popen(
             [
-                sys.executable, 'yt_dlp/__main__.py', '-v',
+                sys.executable, 'yt_dlp/__main__.py',
+                '-v', '--ignore-config',
                 '--username=johnsmith@gmail.com',
                 '--password=my_secret_password',
             ], cwd=rootDir, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
@@ -52,7 +55,8 @@ def test_private_info_eq(self):
     def test_private_info_shortarg_eq(self):
         outp = subprocess.Popen(
             [
-                sys.executable, 'yt_dlp/__main__.py', '-v',
+                sys.executable, 'yt_dlp/__main__.py',
+                '-v', '--ignore-config',
                 '-u=johnsmith@gmail.com',
                 '-p=my_secret_password',
             ], cwd=rootDir, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
index bf13efe2c3eb874884a71b3d401d8abde78d45c2..cca60561f510842593698f4b248e0d1361c2fc43 100644 (file)
--- a/test/test_write_annotations.py.disabled
+++ b/test/test_write_annotations.py.disabled
@@ -6,7 +6,6 @@ import unittest
 
 sys.path.insert(0, os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
 
-import io
 import xml.etree.ElementTree
 from test.helper import get_params, is_download_test, try_rm
 
index ca23c910d5e3d06e0225ee59aad67c57fec83e1f..2c2013295ced1a91091f2d4a15e7379cf65c807e 100644 (file)
--- a/test/test_youtube_signature.py
+++ b/test/test_youtube_signature.py
@@ -1,5 +1,6 @@
 #!/usr/bin/env python3
 # Allow direct execution
+import contextlib
 import os
 import sys
 import unittest
@@ -127,11 +128,9 @@ def setUp(self):
             os.mkdir(self.TESTDATA_DIR)
 
     def tearDown(self):
-        try:
+        with contextlib.suppress(OSError):
             for f in os.listdir(self.TESTDATA_DIR):
                 os.remove(f)
-        except OSError:
-            pass
 
 
 def t_factory(name, sig_func, url_pattern):
index eaf2d9216788d137df6f1475bdae72c84768bbfb..155b5a063fe5640e5748489815e9485302b9bfe4 100644 (file)
--- a/yt_dlp/YoutubeDL.py
+++ b/yt_dlp/YoutubeDL.py
@@ -23,7 +23,6 @@
 import traceback
 import unicodedata
 import urllib.request
-from enum import Enum
 from string import ascii_letters
 
 from .cache import Cache
@@ -82,6 +81,7 @@
     ISO3166Utils,
     LazyList,
     MaxDownloadsReached,
+    Namespace,
     PagedList,
     PerRequestProxyHandler,
     Popen,
@@ -878,14 +878,15 @@ def trouble(self, message=None, tb=None, is_error=True):
             raise DownloadError(message, exc_info)
         self._download_retcode = 1
 
-    class Styles(Enum):
-        HEADERS = 'yellow'
-        EMPHASIS = 'light blue'
-        ID = 'green'
-        DELIM = 'blue'
-        ERROR = 'red'
-        WARNING = 'yellow'
-        SUPPRESS = 'light black'
+    Styles = Namespace(
+        HEADERS='yellow',
+        EMPHASIS='light blue',
+        ID='green',
+        DELIM='blue',
+        ERROR='red',
+        WARNING='yellow',
+        SUPPRESS='light black',
+    )
 
     def _format_text(self, handle, allow_colors, text, f, fallback=None, *, test_encoding=False):
         text = str(text)
@@ -896,8 +897,6 @@ def _format_text(self, handle, allow_colors, text, f, fallback=None, *, test_enc
             text = text.encode(encoding, 'ignore').decode(encoding)
             if fallback is not None and text != original_text:
                 text = fallback
-        if isinstance(f, Enum):
-            f = f.value
         return format_text(text, f) if allow_colors else text if fallback is None else fallback
 
     def _format_screen(self, *args, **kwargs):
@@ -1760,7 +1759,8 @@ def get_entry(i):
             playlist_index, entry = entry_tuple
             if 'playlist-index' in self.params.get('compat_opts', []):
                 playlist_index = playlistitems[i - 1] if playlistitems else i + playliststart - 1
-            self.to_screen(f'[download] Downloading video {i} of {n_entries}')
+            self.to_screen('[download] Downloading video %s of %s' % (
+                self._format_screen(i, self.Styles.ID), self._format_screen(n_entries, self.Styles.EMPHASIS)))
             # This __x_forwarded_for_ip thing is a bit ugly but requires
             # minimal changes
             if x_forwarded_for:
@@ -2337,11 +2337,9 @@ def _fill_common_fields(self, info_dict, is_video=True):
             if info_dict.get(date_key) is None and info_dict.get(ts_key) is not None:
                 # Working around out-of-range timestamp values (e.g. negative ones on Windows,
                 # see http://bugs.python.org/issue1646728)
-                try:
+                with contextlib.suppress(ValueError, OverflowError, OSError):
                     upload_date = datetime.datetime.utcfromtimestamp(info_dict[ts_key])
                     info_dict[date_key] = upload_date.strftime('%Y%m%d')
-                except (ValueError, OverflowError, OSError):
-                    pass
 
         live_keys = ('is_live', 'was_live')
         live_status = info_dict.get('live_status')
@@ -3631,10 +3629,8 @@ def get_encoding(stream):
                 if re.match('[0-9a-f]+', out):
                     write_debug('Git HEAD: %s' % out)
             except Exception:
-                try:
+                with contextlib.suppress(Exception):
                     sys.exc_clear()
-                except Exception:
-                    pass
 
         def python_implementation():
             impl_name = platform.python_implementation()
@@ -3651,7 +3647,7 @@ def python_implementation():
         exe_versions, ffmpeg_features = FFmpegPostProcessor.get_versions_and_features(self)
         ffmpeg_features = {key for key, val in ffmpeg_features.items() if val}
         if ffmpeg_features:
-            exe_versions['ffmpeg'] += ' (%s)' % ','.join(ffmpeg_features)
+            exe_versions['ffmpeg'] += ' (%s)' % ','.join(sorted(ffmpeg_features))
 
         exe_versions['rtmpdump'] = rtmpdump_version()
         exe_versions['phantomjs'] = PhantomJSwrapper._version()
index 24991e19b44f68ac51adcc0977330742406cb048..9ea13ad37a93b774741df7918bf3eba0306c8373 100644 (file)
--- a/yt_dlp/__init__.py
+++ b/yt_dlp/__init__.py
@@ -404,7 +404,8 @@ def report_conflict(arg1, opt1, arg2='--allow-unplayable-formats', opt2='allow_u
     report_conflict('--sponskrub', 'sponskrub', '--remove-chapters', 'remove_chapters')
     report_conflict('--sponskrub', 'sponskrub', '--sponsorblock-mark', 'sponsorblock_mark')
     report_conflict('--sponskrub', 'sponskrub', '--sponsorblock-remove', 'sponsorblock_remove')
-    report_conflict('--sponskrub-cut', 'sponskrub_cut', '--split-chapter', 'split_chapters', val1=opts.sponskrub and opts.sponskrub_cut)
+    report_conflict('--sponskrub-cut', 'sponskrub_cut', '--split-chapter', 'split_chapters',
+                    val1=opts.sponskrub and opts.sponskrub_cut)
 
     # Conflicts with --allow-unplayable-formats
     report_conflict('--add-metadata', 'addmetadata')
index 01818df61cd2042c7c4eabf0f20d50d102f619d0..603f3d1875fe6aaad9093252075fd823a7ad190d 100644 (file)
--- a/yt_dlp/aes.py
+++ b/yt_dlp/aes.py
@@ -493,7 +493,7 @@ def ghash(subkey, data):
 
     last_y = [0] * BLOCK_SIZE_BYTES
     for i in range(0, len(data), BLOCK_SIZE_BYTES):
-        block = data[i : i + BLOCK_SIZE_BYTES]  # noqa: E203
+        block = data[i: i + BLOCK_SIZE_BYTES]
         last_y = block_product(xor(last_y, block), subkey)
 
     return last_y
index 0cac3ee888edb8a3a61463eac870372f8922c421..e3f8a7dab2ff2e8e7ffe2f3071471f5dc011c739 100644 (file)
--- a/yt_dlp/cache.py
+++ b/yt_dlp/cache.py
@@ -1,3 +1,4 @@
+import contextlib
 import errno
 import json
 import os
@@ -57,7 +58,7 @@ def load(self, section, key, dtype='json', default=None):
             return default
 
         cache_fn = self._get_cache_fn(section, key, dtype)
-        try:
+        with contextlib.suppress(OSError):
             try:
                 with open(cache_fn, encoding='utf-8') as cachef:
                     self._ydl.write_debug(f'Loading {section}.{key} from cache')
@@ -68,8 +69,6 @@ def load(self, section, key, dtype='json', default=None):
                 except OSError as oe:
                     file_size = str(oe)
                 self._ydl.report_warning(f'Cache retrieval from {cache_fn} failed ({file_size})')
-        except OSError:
-            pass  # No cache available
 
         return default
 
index df0c54606ea320b872a42e54be55c1ca04a41243..f18c6cce287bfc44de89602b4724acce479ba93e 100644 (file)
--- a/yt_dlp/compat.py
+++ b/yt_dlp/compat.py
@@ -1,6 +1,7 @@
 import asyncio
 import base64
 import collections
+import contextlib
 import ctypes
 import getpass
 import html
@@ -54,14 +55,11 @@ def compat_etree_fromstring(text):
     def compat_shlex_quote(s):
         return s if re.match(r'^[-_\w./]+$', s) else '"%s"' % s.replace('"', '\\"')
 else:
-    from shlex import quote as compat_shlex_quote
+    from shlex import quote as compat_shlex_quote  # noqa: F401
 
 
 def compat_ord(c):
-    if type(c) is int:
-        return c
-    else:
-        return ord(c)
+    return c if isinstance(c, int) else ord(c)
 
 
 def compat_setenv(key, value, env=os.environ):
@@ -118,16 +116,17 @@ def compat_asyncio_run(coro):
 # Python 3.8+ does not honor %HOME% on windows, but this breaks compatibility with youtube-dl
 # See https://github.com/yt-dlp/yt-dlp/issues/792
 # https://docs.python.org/3/library/os.path.html#os.path.expanduser
-if compat_os_name in ('nt', 'ce') and 'HOME' in os.environ:
-    _userhome = os.environ['HOME']
-
+if compat_os_name in ('nt', 'ce'):
     def compat_expanduser(path):
-        if not path.startswith('~'):
+        HOME = os.environ.get('HOME')
+        if not HOME:
+            return os.path.expanduser(path)
+        elif not path.startswith('~'):
             return path
         i = path.replace('\\', '/', 1).find('/')  # ~user
         if i < 0:
             i = len(path)
-        userhome = os.path.join(os.path.dirname(_userhome), path[1:i]) if i > 1 else _userhome
+        userhome = os.path.join(os.path.dirname(HOME), path[1:i]) if i > 1 else HOME
         return userhome + path[i:]
 else:
     compat_expanduser = os.path.expanduser
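
On Windows, the override now reads HOME on every call instead of caching it at import time, and falls back to os.path.expanduser when HOME is unset; that is what allows the new tests above to override HOME temporarily. A rough sketch of the expected behaviour, assuming compat_os_name is 'nt':

    os.environ['HOME'] = R'C:\Users\test'
    compat_expanduser('~')        # 'C:\\Users\\test'
    compat_expanduser('~other')   # 'C:\\Users\\other' (a sibling of HOME)
    del os.environ['HOME']
    compat_expanduser('~')        # falls back to os.path.expanduser('~')
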
@@ -158,11 +157,9 @@ def windows_enable_vt_mode():  # TODO: Do this the proper way https://bugs.pytho
     global WINDOWS_VT_MODE
     startupinfo = subprocess.STARTUPINFO()
     startupinfo.dwFlags |= subprocess.STARTF_USESHOWWINDOW
-    try:
+    with contextlib.suppress(Exception):
         subprocess.Popen('', shell=True, startupinfo=startupinfo).wait()
         WINDOWS_VT_MODE = True
-    except Exception:
-        pass
 
 
 #  Deprecated
index 6ff9f6f2d29dd5e8108becfa56c5f6cda70c7994..8a4baa5bb9eb37f79466bfa86485901ebe84452b 100644 (file)
--- a/yt_dlp/cookies.py
+++ b/yt_dlp/cookies.py
@@ -167,7 +167,7 @@ def _firefox_browser_dir():
     if sys.platform in ('linux', 'linux2'):
         return os.path.expanduser('~/.mozilla/firefox')
     elif sys.platform == 'win32':
-        return os.path.expandvars(r'%APPDATA%\Mozilla\Firefox\Profiles')
+        return os.path.expandvars(R'%APPDATA%\Mozilla\Firefox\Profiles')
     elif sys.platform == 'darwin':
         return os.path.expanduser('~/Library/Application Support/Firefox')
     else:
@@ -191,12 +191,12 @@ def _get_chromium_based_browser_settings(browser_name):
         appdata_local = os.path.expandvars('%LOCALAPPDATA%')
         appdata_roaming = os.path.expandvars('%APPDATA%')
         browser_dir = {
-            'brave': os.path.join(appdata_local, r'BraveSoftware\Brave-Browser\User Data'),
-            'chrome': os.path.join(appdata_local, r'Google\Chrome\User Data'),
-            'chromium': os.path.join(appdata_local, r'Chromium\User Data'),
-            'edge': os.path.join(appdata_local, r'Microsoft\Edge\User Data'),
-            'opera': os.path.join(appdata_roaming, r'Opera Software\Opera Stable'),
-            'vivaldi': os.path.join(appdata_local, r'Vivaldi\User Data'),
+            'brave': os.path.join(appdata_local, R'BraveSoftware\Brave-Browser\User Data'),
+            'chrome': os.path.join(appdata_local, R'Google\Chrome\User Data'),
+            'chromium': os.path.join(appdata_local, R'Chromium\User Data'),
+            'edge': os.path.join(appdata_local, R'Microsoft\Edge\User Data'),
+            'opera': os.path.join(appdata_roaming, R'Opera Software\Opera Stable'),
+            'vivaldi': os.path.join(appdata_local, R'Vivaldi\User Data'),
         }[browser_name]
 
     elif sys.platform == 'darwin':
@@ -237,8 +237,8 @@ def _extract_chrome_cookies(browser_name, profile, keyring, logger):
     logger.info(f'Extracting cookies from {browser_name}')
 
     if not SQLITE_AVAILABLE:
-        logger.warning(('Cannot extract cookies from {} without sqlite3 support. '
-                        'Please use a python interpreter compiled with sqlite3 support').format(browser_name))
+        logger.warning(f'Cannot extract cookies from {browser_name} without sqlite3 support. '
+                       'Please use a python interpreter compiled with sqlite3 support')
         return YoutubeDLCookieJar()
 
     config = _get_chromium_based_browser_settings(browser_name)
@@ -269,8 +269,7 @@ def _extract_chrome_cookies(browser_name, profile, keyring, logger):
             cursor.connection.text_factory = bytes
             column_names = _get_column_names(cursor, 'cookies')
             secure_column = 'is_secure' if 'is_secure' in column_names else 'secure'
-            cursor.execute('SELECT host_key, name, value, encrypted_value, path, '
-                           'expires_utc, {} FROM cookies'.format(secure_column))
+            cursor.execute(f'SELECT host_key, name, value, encrypted_value, path, expires_utc, {secure_column} FROM cookies')
             jar = YoutubeDLCookieJar()
             failed_cookies = 0
             unencrypted_cookies = 0
@@ -346,11 +345,11 @@ class ChromeCookieDecryptor:
     """
 
     def decrypt(self, encrypted_value):
-        raise NotImplementedError
+        raise NotImplementedError('Must be implemented by sub classes')
 
     @property
     def cookie_counts(self):
-        raise NotImplementedError
+        raise NotImplementedError('Must be implemented by sub classes')
 
 
 def get_cookie_decryptor(browser_root, browser_keyring_name, logger, *, keyring=None):
@@ -361,8 +360,7 @@ def get_cookie_decryptor(browser_root, browser_keyring_name, logger, *, keyring=
     elif sys.platform == 'win32':
         return WindowsChromeCookieDecryptor(browser_root, logger)
     else:
-        raise NotImplementedError('Chrome cookie decryption is not supported '
-                                  'on this platform: {}'.format(sys.platform))
+        raise NotImplementedError(f'Chrome cookie decryption is not supported on this platform: {sys.platform}')
 
 
 class LinuxChromeCookieDecryptor(ChromeCookieDecryptor):
@@ -546,8 +544,7 @@ def read_cstring(self):
 
     def skip(self, num_bytes, description='unknown'):
         if num_bytes > 0:
-            self._logger.debug('skipping {} bytes ({}): {}'.format(
-                num_bytes, description, self.read_bytes(num_bytes)))
+            self._logger.debug(f'skipping {num_bytes} bytes ({description}): {self.read_bytes(num_bytes)!r}')
         elif num_bytes < 0:
             raise ParserError(f'invalid skip of {num_bytes} bytes')
 
@@ -784,8 +781,8 @@ def _get_kwallet_password(browser_keyring_name, logger):
 
         stdout, stderr = proc.communicate_or_kill()
         if proc.returncode != 0:
-            logger.error('kwallet-query failed with return code {}. Please consult '
-                         'the kwallet-query man page for details'.format(proc.returncode))
+            logger.error(f'kwallet-query failed with return code {proc.returncode}. Please consult '
+                         'the kwallet-query man page for details')
             return b''
         else:
             if stdout.lower().startswith(b'failed to read'):
index 3033926ae18917161daf64592579ff16c47598bb..3e539698864bf426531925c8dde76dda8b5f630a 100644 (file)
--- a/yt_dlp/downloader/common.py
+++ b/yt_dlp/downloader/common.py
@@ -1,3 +1,4 @@
+import contextlib
 import errno
 import os
 import random
@@ -12,6 +13,7 @@
 )
 from ..utils import (
     LockingUnsupportedError,
+    Namespace,
     decodeArgument,
     encodeFilename,
     error_to_compat_str,
@@ -70,12 +72,30 @@ class FileDownloader:
 
     def __init__(self, ydl, params):
         """Create a FileDownloader object with the given options."""
-        self.ydl = ydl
+        self._set_ydl(ydl)
         self._progress_hooks = []
         self.params = params
         self._prepare_multiline_status()
         self.add_progress_hook(self.report_progress)
 
+    def _set_ydl(self, ydl):
+        self.ydl = ydl
+
+        for func in (
+            'deprecation_warning',
+            'report_error',
+            'report_file_already_downloaded',
+            'report_warning',
+            'to_console_title',
+            'to_stderr',
+            'trouble',
+            'write_debug',
+        ):
+            setattr(self, func, getattr(ydl, func))
+
+    def to_screen(self, *args, **kargs):
+        self.ydl.to_screen(*args, quiet=self.params.get('quiet'), **kargs)
+
     @staticmethod
     def format_seconds(seconds):
         time = timetuple_from_msec(seconds * 1000)
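
_set_ydl replaces the hand-written forwarding methods removed in the next hunk: each listed name is bound onto the downloader as the corresponding bound method of the YoutubeDL instance. A minimal sketch of the pattern; the class and method names here are illustrative:

    class Delegator:
        def __init__(self, target):
            # Forward these calls to `target` without writing one wrapper per name
            for name in ('report_warning', 'report_error', 'write_debug'):
                setattr(self, name, getattr(target, name))
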
@@ -157,27 +177,6 @@ def parse_bytes(bytestr):
         multiplier = 1024.0 ** 'bkmgtpezy'.index(matchobj.group(2).lower())
         return int(round(number * multiplier))
 
-    def to_screen(self, *args, **kargs):
-        self.ydl.to_screen(*args, quiet=self.params.get('quiet'), **kargs)
-
-    def to_stderr(self, message):
-        self.ydl.to_stderr(message)
-
-    def to_console_title(self, message):
-        self.ydl.to_console_title(message)
-
-    def trouble(self, *args, **kargs):
-        self.ydl.trouble(*args, **kargs)
-
-    def report_warning(self, *args, **kargs):
-        self.ydl.report_warning(*args, **kargs)
-
-    def report_error(self, *args, **kargs):
-        self.ydl.report_error(*args, **kargs)
-
-    def write_debug(self, *args, **kargs):
-        self.ydl.write_debug(*args, **kargs)
-
     def slow_down(self, start_time, now, byte_counter):
         """Sleep if the download speed is over the rate limit."""
         rate_limit = self.params.get('ratelimit')
@@ -263,10 +262,8 @@ def try_utime(self, filename, last_modified_hdr):
         # Ignore obviously invalid dates
         if filetime == 0:
             return
-        try:
+        with contextlib.suppress(Exception):
             os.utime(filename, (time.time(), filetime))
-        except Exception:
-            pass
         return filetime
 
     def report_destination(self, filename):
@@ -287,18 +284,18 @@ def _prepare_multiline_status(self, lines=1):
     def _finish_multiline_status(self):
         self._multiline.end()
 
-    _progress_styles = {
-        'downloaded_bytes': 'light blue',
-        'percent': 'light blue',
-        'eta': 'yellow',
-        'speed': 'green',
-        'elapsed': 'bold white',
-        'total_bytes': '',
-        'total_bytes_estimate': '',
-    }
+    ProgressStyles = Namespace(
+        downloaded_bytes='light blue',
+        percent='light blue',
+        eta='yellow',
+        speed='green',
+        elapsed='bold white',
+        total_bytes='',
+        total_bytes_estimate='',
+    )
 
     def _report_progress_status(self, s, default_template):
-        for name, style in self._progress_styles.items():
+        for name, style in self.ProgressStyles._asdict().items():
             name = f'_{name}_str'
             if name not in s:
                 continue
@@ -391,10 +388,6 @@ def report_retry(self, err, count, retries):
             '[download] Got server HTTP error: %s. Retrying (attempt %d of %s) ...'
             % (error_to_compat_str(err), count, self.format_retries(retries)))
 
-    def report_file_already_downloaded(self, *args, **kwargs):
-        """Report file has already been fully downloaded."""
-        return self.ydl.report_file_already_downloaded(*args, **kwargs)
-
     def report_unable_to_resume(self):
         """Report it was impossible to resume download."""
         self.to_screen('[download] Unable to resume')
@@ -433,25 +426,16 @@ def download(self, filename, info_dict, subtitle=False):
                 self._finish_multiline_status()
                 return True, False
 
-        if subtitle is False:
-            min_sleep_interval = self.params.get('sleep_interval')
-            if min_sleep_interval:
-                max_sleep_interval = self.params.get('max_sleep_interval', min_sleep_interval)
-                sleep_interval = random.uniform(min_sleep_interval, max_sleep_interval)
-                self.to_screen(
-                    '[download] Sleeping %s seconds ...' % (
-                        int(sleep_interval) if sleep_interval.is_integer()
-                        else '%.2f' % sleep_interval))
-                time.sleep(sleep_interval)
+        if subtitle:
+            sleep_interval = self.params.get('sleep_interval_subtitles') or 0
         else:
-            sleep_interval_sub = 0
-            if type(self.params.get('sleep_interval_subtitles')) is int:
-                sleep_interval_sub = self.params.get('sleep_interval_subtitles')
-            if sleep_interval_sub > 0:
-                self.to_screen(
-                    '[download] Sleeping %s seconds ...' % (
-                        sleep_interval_sub))
-                time.sleep(sleep_interval_sub)
+            min_sleep_interval = self.params.get('sleep_interval') or 0
+            sleep_interval = random.uniform(
+                min_sleep_interval, self.params.get('max_sleep_interval', min_sleep_interval))
+        if sleep_interval > 0:
+            self.to_screen(f'[download] Sleeping {sleep_interval:.2f} seconds ...')
+            time.sleep(sleep_interval)
+
         ret = self.real_download(filename, info_dict)
         self._finish_multiline_status()
         return ret, True
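
The sleep handling collapses into one code path: subtitle downloads use sleep_interval_subtitles, everything else sleeps a uniformly random time between sleep_interval and max_sleep_interval (which defaults to the minimum), and nothing is printed or slept when the result is zero. A condensed sketch of the same logic, with `params` standing in for the downloader's option dict:

    import random

    def sleep_seconds(params, subtitle=False):
        if subtitle:
            return params.get('sleep_interval_subtitles') or 0
        min_sleep = params.get('sleep_interval') or 0
        return random.uniform(min_sleep, params.get('max_sleep_interval', min_sleep))
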
index 2a97cfd16127d9c3ebb01e998f626d1132c01480..390c840bbdf6bfd517750b7a3170b6b0c750fa97 100644 (file)
--- a/yt_dlp/downloader/fragment.py
+++ b/yt_dlp/downloader/fragment.py
@@ -1,3 +1,4 @@
+import contextlib
 import http.client
 import json
 import math
@@ -310,10 +311,8 @@ def _finish_frag_download(self, ctx, info_dict):
             if self.params.get('updatetime', True):
                 filetime = ctx.get('fragment_filetime')
                 if filetime:
-                    try:
+                    with contextlib.suppress(Exception):
                         os.utime(ctx['filename'], (time.time(), filetime))
-                    except Exception:
-                        pass
             downloaded_bytes = os.path.getsize(encodeFilename(ctx['filename']))
 
         self._hook_progress({
@@ -523,7 +522,8 @@ def _download_fragment(fragment):
                     break
                 try:
                     download_fragment(fragment, ctx)
-                    result = append_fragment(decrypt_fragment(fragment, self._read_fragment(ctx)), fragment['frag_index'], ctx)
+                    result = append_fragment(
+                        decrypt_fragment(fragment, self._read_fragment(ctx)), fragment['frag_index'], ctx)
                 except KeyboardInterrupt:
                     if info_dict.get('is_live'):
                         break
index 96d113846994538b1712e84dc32fe9016f7fddb3..6b190cd90a1aa87474aa38e6a6d62deccd69da09 100644 (file)
--- a/yt_dlp/downloader/websocket.py
+++ b/yt_dlp/downloader/websocket.py
@@ -1,4 +1,5 @@
 import asyncio
+import contextlib
 import os
 import signal
 import threading
@@ -29,11 +30,9 @@ async def call_conn(proc, stdin):
             except (BrokenPipeError, OSError):
                 pass
             finally:
-                try:
+                with contextlib.suppress(OSError):
                     stdin.flush()
                     stdin.close()
-                except OSError:
-                    pass
                 os.kill(os.getpid(), signal.SIGINT)
 
         class FFmpegStdinFD(FFmpegFD):
index b35484246aadb75154ff4ed83216f2b207c67e32..6288c5c6bb58677e9d5d6466d906e586178937c7 100644 (file)
--- a/yt_dlp/extractor/__init__.py
+++ b/yt_dlp/extractor/__init__.py
@@ -1,24 +1,23 @@
+import contextlib
 import os
 
 from ..utils import load_plugins
 
 _LAZY_LOADER = False
 if not os.environ.get('YTDLP_NO_LAZY_EXTRACTORS'):
-    try:
-        from .lazy_extractors import *
+    with contextlib.suppress(ImportError):
+        from .lazy_extractors import *  # noqa: F403
         from .lazy_extractors import _ALL_CLASSES
         _LAZY_LOADER = True
-    except ImportError:
-        pass
 
 if not _LAZY_LOADER:
-    from .extractors import *
-    _ALL_CLASSES = [
+    from .extractors import *  # noqa: F403
+    _ALL_CLASSES = [  # noqa: F811
         klass
         for name, klass in globals().items()
         if name.endswith('IE') and name != 'GenericIE'
     ]
-    _ALL_CLASSES.append(GenericIE)
+    _ALL_CLASSES.append(GenericIE)  # noqa: F405
 
 _PLUGIN_CLASSES = load_plugins('extractor', 'IE', globals())
 _ALL_CLASSES = list(_PLUGIN_CLASSES.values()) + _ALL_CLASSES
index e8975e5e2bca6081b8165cdbee2e5712066224ef..65ac2497f47a3620f76cd9259caa68b7c08492f3 100644 (file)
--- a/yt_dlp/extractor/cpac.py
+++ b/yt_dlp/extractor/cpac.py
@@ -9,13 +9,6 @@
     urljoin,
 )
 
-# compat_range
-try:
-    if callable(xrange):
-        range = xrange
-except (NameError, TypeError):
-    pass
-
 
 class CPACIE(InfoExtractor):
     IE_NAME = 'cpac'
index cd3934a70d8ed3befa5006292f00a1cf336c5985..d67b2eeecc00a89e50fd7d9e2e472fe33f6dafdd 100644 (file)
--- a/yt_dlp/extractor/extractors.py
+++ b/yt_dlp/extractor/extractors.py
@@ -1,4 +1,5 @@
-# flake8: noqa
+# flake8: noqa: F401
+
 from .abc import (
     ABCIE,
     ABCIViewIE,
index f2600aaa4e071baf648df92d55fe7d50171bf757..61e3a8b861f3b2317957e3e5ebb588eb31f81352 100644 (file)
--- a/yt_dlp/extractor/openload.py
+++ b/yt_dlp/extractor/openload.py
@@ -1,3 +1,4 @@
+import contextlib
 import json
 import os
 import subprocess
@@ -31,13 +32,11 @@ def cookie_to_dict(cookie):
         cookie_dict['secure'] = cookie.secure
     if cookie.discard is not None:
         cookie_dict['discard'] = cookie.discard
-    try:
+    with contextlib.suppress(TypeError):
         if (cookie.has_nonstandard_attr('httpOnly')
                 or cookie.has_nonstandard_attr('httponly')
                 or cookie.has_nonstandard_attr('HttpOnly')):
             cookie_dict['httponly'] = True
-    except TypeError:
-        pass
     return cookie_dict
 
 
@@ -129,10 +128,8 @@ def __init__(self, extractor, required_version=None, timeout=10000):
 
     def __del__(self):
         for name in self._TMP_FILE_NAMES:
-            try:
+            with contextlib.suppress(OSError, KeyError):
                 os.remove(self._TMP_FILES[name].name)
-            except (OSError, KeyError):
-                pass
 
     def _save_cookies(self, url):
         cookies = cookie_jar_to_list(self.extractor._downloader.cookiejar)
index e5837e8c8752cb3e5bf89d5bcb74b53a1aed379c..42a6029688dd73e567d1ed706e70d4423c1e170b 100644 (file)
--- a/yt_dlp/extractor/rtve.py
+++ b/yt_dlp/extractor/rtve.py
@@ -1,6 +1,5 @@
 import base64
 import io
-import sys
 
 from .common import InfoExtractor
 from ..compat import (
@@ -17,8 +16,6 @@
     try_get,
 )
 
-_bytes_to_chr = (lambda x: x) if sys.version_info[0] == 2 else (lambda x: map(chr, x))
-
 
 class RTVEALaCartaIE(InfoExtractor):
     IE_NAME = 'rtve.es:alacarta'
@@ -87,7 +84,7 @@ def _decrypt_url(png):
                 alphabet = []
                 e = 0
                 d = 0
-                for l in _bytes_to_chr(alphabet_data):
+                for l in alphabet_data.decode('iso-8859-1'):
                     if d == 0:
                         alphabet.append(l)
                         d = e = (e + 1) % 4
@@ -97,7 +94,7 @@ def _decrypt_url(png):
                 f = 0
                 e = 3
                 b = 1
-                for letter in _bytes_to_chr(url_data):
+                for letter in url_data.decode('iso-8859-1'):
                     if f == 0:
                         l = int(letter) * 10
                         f = 1
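
Decoding the payload as ISO-8859-1 maps every byte 0x00-0xFF to the Unicode code point of the same value, so iterating over the decoded string yields exactly what the removed Python 2 helper (map(chr, data)) produced:

    data = bytes([72, 105, 255])
    assert list(data.decode('iso-8859-1')) == [chr(b) for b in data]  # ['H', 'i', 'ÿ']
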
index 3128825e5d58db3efe31d20a51a5c96e01652ff8..a2068a1b6ec231f8cd50d90896d3a6a2a6fd94e9 100644 (file)
--- a/yt_dlp/extractor/spotify.py
+++ b/yt_dlp/extractor/spotify.py
@@ -102,6 +102,7 @@ def _extract_embed_urls(cls, webpage):
 
 class SpotifyIE(SpotifyBaseIE):
     IE_NAME = 'spotify'
+    IE_DESC = 'Spotify episodes'
     _VALID_URL = SpotifyBaseIE._VALID_URL_TEMPL % 'episode'
     _TESTS = [{
         'url': 'https://open.spotify.com/episode/4Z7GAJ50bgctf6uclHlWKo',
@@ -131,6 +132,7 @@ def _real_extract(self, url):
 
 class SpotifyShowIE(SpotifyBaseIE):
     IE_NAME = 'spotify:show'
+    IE_DESC = 'Spotify shows'
     _VALID_URL = SpotifyBaseIE._VALID_URL_TEMPL % 'show'
     _TEST = {
         'url': 'https://open.spotify.com/show/4PM9Ke6l66IRNpottHKV9M',
index 43123094808acb76b0418deb160e2eac6cef4fd9..7da54e088676edd5e08d30e2640a8af17817a498 100644 (file)
--- a/yt_dlp/extractor/youtube.py
+++ b/yt_dlp/extractor/youtube.py
@@ -3586,17 +3586,17 @@ def process_language(container, base_url, lang_code, sub_name, query):
                 headers=self.generate_api_headers(ytcfg=master_ytcfg),
                 note='Downloading initial data API JSON')
 
-        try:
-            # This will error if there is no livechat
+        try:  # This will error if there is no livechat
             initial_data['contents']['twoColumnWatchNextResults']['conversationBar']['liveChatRenderer']['continuations'][0]['reloadContinuationData']['continuation']
+        except (KeyError, IndexError, TypeError):
+            pass
+        else:
             info.setdefault('subtitles', {})['live_chat'] = [{
-                'url': 'https://www.youtube.com/watch?v=%s' % video_id,  # url is needed to set cookies
+                'url': f'https://www.youtube.com/watch?v={video_id}',  # url is needed to set cookies
                 'video_id': video_id,
                 'ext': 'json',
                 'protocol': 'youtube_live_chat' if is_live or is_upcoming else 'youtube_live_chat_replay',
             }]
-        except (KeyError, IndexError, TypeError):
-            pass
 
         if initial_data:
             info['chapters'] = (
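
Moving the subtitle assignment into an `else` clause makes the intent clearer: the `try` body only probes for the livechat continuation, and the `else` branch runs only when that lookup succeeded. Schematically, with illustrative data:

    data = {}  # stand-in for an initial-data dict without a live chat section
    try:
        continuation = data['conversationBar']['liveChatRenderer']['continuations'][0]
    except (KeyError, IndexError, TypeError):
        pass                                     # no live chat; nothing is added
    else:
        print('live chat found:', continuation)  # runs only on a successful lookup
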
index 001836887a0711e157c4d083d731f5100613267b..70857b79816812be79bc9c4d4a32deda3a8834e1 100644 (file)
@@ -1,7 +1,8 @@
+import collections
+import contextlib
 import json
 import operator
 import re
-from collections.abc import MutableMapping
 
 from .utils import ExtractorError, remove_quotes
 
@@ -35,38 +36,17 @@ def __init__(self):
         ExtractorError.__init__(self, 'Invalid continue')
 
 
-class LocalNameSpace(MutableMapping):
-    def __init__(self, *stack):
-        self.stack = tuple(stack)
-
-    def __getitem__(self, key):
-        for scope in self.stack:
-            if key in scope:
-                return scope[key]
-        raise KeyError(key)
-
+class LocalNameSpace(collections.ChainMap):
     def __setitem__(self, key, value):
-        for scope in self.stack:
+        for scope in self.maps:
             if key in scope:
                 scope[key] = value
-                break
-        else:
-            self.stack[0][key] = value
-        return value
+                return
+        self.maps[0][key] = value
 
     def __delitem__(self, key):
         raise NotImplementedError('Deleting is not supported')
 
-    def __iter__(self):
-        for scope in self.stack:
-            yield from scope
-
-    def __len__(self, key):
-        return len(iter(self))
-
-    def __repr__(self):
-        return f'LocalNameSpace{self.stack}'
-
 
 class JSInterpreter:
     def __init__(self, code, objects=None):
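
collections.ChainMap already provides lookup across the scope stack, iteration, length and a sensible repr, so only assignment (write to the innermost scope that already defines the name, otherwise the first map) and the deletion guard need to be kept. A small illustration of the assignment semantics:

    local, outer = {}, {'x': 1}
    ns = LocalNameSpace(local, outer)
    ns['x'] = 2     # 'x' already exists in `outer`, so that scope is updated
    ns['y'] = 3     # new names are created in the first map (`local`)
    assert outer == {'x': 2} and local == {'y': 3}
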
@@ -302,10 +282,8 @@ def interpret_expression(self, expr, local_vars, allow_recursion):
         if var_m:
             return local_vars[var_m.group('name')]
 
-        try:
+        with contextlib.suppress(ValueError):
             return json.loads(expr)
-        except ValueError:
-            pass
 
         m = re.match(
             r'(?P<in>%s)\[(?P<idx>.+)\]$' % _NAME_RE, expr)
@@ -521,14 +499,13 @@ def call_function(self, funcname, *args):
 
     def build_function(self, argnames, code, *global_stack):
         global_stack = list(global_stack) or [{}]
-        local_vars = global_stack.pop(0)
 
         def resf(args, **kwargs):
-            local_vars.update({
+            global_stack[0].update({
                 **dict(zip(argnames, args)),
                 **kwargs
             })
-            var_stack = LocalNameSpace(local_vars, *global_stack)
+            var_stack = LocalNameSpace(*global_stack)
             for stmt in self._separate(code.replace('\n', ''), ';'):
                 ret, should_abort = self.interpret_statement(stmt, var_stack)
                 if should_abort:
index 243beab4d14a4b042cfff4c9b54cb59180d70f3d..0c042caf4b39ccafc102f97fbe00df7b6ccd3a67 100644 (file)
--- a/yt_dlp/options.py
+++ b/yt_dlp/options.py
@@ -21,6 +21,7 @@
     Config,
     expand_path,
     get_executable_path,
+    join_nonempty,
     remove_end,
     write_string,
 )
@@ -109,9 +110,43 @@ def load_configs():
     return parser, opts, args
 
 
+class _YoutubeDLHelpFormatter(optparse.IndentedHelpFormatter):
+    def __init__(self):
+        # No need to wrap help messages if we're on a wide console
+        max_width = compat_get_terminal_size().columns or 80
+        # 47% is chosen because that is how README.md is currently formatted
+        # and moving help text even further to the right is undesirable.
+        # This can be reduced in the future to get a prettier output
+        super().__init__(width=max_width, max_help_position=int(0.47 * max_width))
+
+    @staticmethod
+    def format_option_strings(option):
+        """ ('-o', '--option') -> -o, --format METAVAR """
+        opts = join_nonempty(
+            option._short_opts and option._short_opts[0],
+            option._long_opts and option._long_opts[0],
+            delim=', ')
+        if option.takes_value():
+            opts += f' {option.metavar}'
+        return opts
+
+
 class _YoutubeDLOptionParser(optparse.OptionParser):
     # optparse is deprecated since python 3.2. So assume a stable interface even for private methods
 
+    def __init__(self):
+        super().__init__(
+            prog='yt-dlp',
+            version=__version__,
+            usage='%prog [OPTIONS] URL [URL...]',
+            epilog='See full documentation at  https://github.com/yt-dlp/yt-dlp#readme',
+            formatter=_YoutubeDLHelpFormatter(),
+            conflict_handler='resolve',
+        )
+
+    def _get_args(self, args):
+        return sys.argv[1:] if args is None else list(args)
+
     def _match_long_opt(self, opt):
         """Improve ambigious argument resolution by comparing option objects instead of argument strings"""
         try:
@@ -123,23 +158,6 @@ def _match_long_opt(self, opt):
 
 
 def create_parser():
-    def _format_option_string(option):
-        ''' ('-o', '--option') -> -o, --format METAVAR'''
-
-        opts = []
-
-        if option._short_opts:
-            opts.append(option._short_opts[0])
-        if option._long_opts:
-            opts.append(option._long_opts[0])
-        if len(opts) > 1:
-            opts.insert(1, ', ')
-
-        if option.takes_value():
-            opts.append(' %s' % option.metavar)
-
-        return ''.join(opts)
-
     def _list_from_options_callback(option, opt_str, value, parser, append=True, delim=',', process=str.strip):
         # append can be True, False or -1 (prepend)
         current = list(getattr(parser.values, option.dest)) if append else []
@@ -204,23 +222,7 @@ def _dict_from_options_callback(
             out_dict[key] = out_dict.get(key, []) + [val] if append else val
         setattr(parser.values, option.dest, out_dict)
 
-    # No need to wrap help messages if we're on a wide console
-    columns = compat_get_terminal_size().columns
-    max_width = columns if columns else 80
-    # 47% is chosen because that is how README.md is currently formatted
-    # and moving help text even further to the right is undesirable.
-    # This can be reduced in the future to get a prettier output
-    max_help_position = int(0.47 * max_width)
-
-    fmt = optparse.IndentedHelpFormatter(width=max_width, max_help_position=max_help_position)
-    fmt.format_option_strings = _format_option_string
-
-    parser = _YoutubeDLOptionParser(
-        version=__version__,
-        formatter=fmt,
-        usage='%prog [OPTIONS] URL [URL...]',
-        conflict_handler='resolve'
-    )
+    parser = _YoutubeDLOptionParser()
 
     general = optparse.OptionGroup(parser, 'General Options')
     general.add_option(
@@ -1048,7 +1050,7 @@ def _dict_from_options_callback(
     verbosity.add_option(
         '-C', '--call-home',
         dest='call_home', action='store_true', default=False,
-        # help='[Broken] Contact the yt-dlp server for debugging')
+        # help='Contact the yt-dlp server for debugging')
         help=optparse.SUPPRESS_HELP)
     verbosity.add_option(
         '--no-call-home',
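
The ad-hoc _format_option_string helper above becomes _YoutubeDLHelpFormatter.format_option_strings, built on join_nonempty (imported from yt_dlp.utils at the top of this file), which joins its truthy arguments with the given delimiter. Roughly, using a simplified stand-in for join_nonempty:

    def join_nonempty(*values, delim='-'):
        # simplified stand-in for yt_dlp.utils.join_nonempty, for illustration only
        return delim.join(str(v) for v in values if v)

    join_nonempty('-f', '--format', delim=', ')         # '-f, --format'
    join_nonempty(None, '--extract-audio', delim=', ')  # '--extract-audio'
    # format_option_strings then appends ' METAVAR' when the option takes a value.
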
index fdea3a7ea1bec9fa3cfcd2cafadcb1131db08798..519d061383669c263c4acbba5af0cbb9eb37ff0f 100644 (file)
--- a/yt_dlp/postprocessor/common.py
+++ b/yt_dlp/postprocessor/common.py
@@ -69,8 +69,8 @@ def pp_key(cls):
         return name[6:] if name[:6].lower() == 'ffmpeg' else name
 
     def to_screen(self, text, prefix=True, *args, **kwargs):
-        tag = '[%s] ' % self.PP_NAME if prefix else ''
         if self._downloader:
+            tag = '[%s] ' % self.PP_NAME if prefix else ''
             return self._downloader.to_screen(f'{tag}{text}', *args, **kwargs)
 
     def report_warning(self, text, *args, **kwargs):
index 5bc435da360b7f21ec92529c82666d960183d776..98885bd194c3d21f635c51acdbc610798a3bc351 100644 (file)
--- a/yt_dlp/postprocessor/metadataparser.py
+++ b/yt_dlp/postprocessor/metadataparser.py
@@ -1,29 +1,25 @@
 import re
-from enum import Enum
 
 from .common import PostProcessor
+from ..utils import Namespace
 
 
 class MetadataParserPP(PostProcessor):
-    class Actions(Enum):
-        INTERPRET = 'interpretter'
-        REPLACE = 'replacer'
-
     def __init__(self, downloader, actions):
-        PostProcessor.__init__(self, downloader)
+        super().__init__(downloader)
         self._actions = []
         for f in actions:
-            action = f[0]
-            assert isinstance(action, self.Actions)
-            self._actions.append(getattr(self, action.value)(*f[1:]))
+            action, *args = f
+            assert action in self.Actions
+            self._actions.append(action(self, *args))
 
     @classmethod
     def validate_action(cls, action, *data):
-        ''' Each action can be:
+        """Each action can be:
                 (Actions.INTERPRET, from, to) OR
                 (Actions.REPLACE, field, search, replace)
-        '''
-        if not isinstance(action, cls.Actions):
+        """
+        if action not in cls.Actions:
             raise ValueError(f'{action!r} is not a valid action')
         getattr(cls, action.value)(cls, *data)  # So this can raise error to validate
 
@@ -99,6 +95,8 @@ def f(info):
         search_re = re.compile(search)
         return f
 
+    Actions = Namespace(INTERPRET=interpretter, REPLACE=replacer)
+
 
 class MetadataFromFieldPP(MetadataParserPP):
     @classmethod
index 34a938362913d8dc93a9a2295e910914ef00ae27..cf52fb2b63c989ce2d590d621c4a5e347a4903d9 100644 (file)
--- a/yt_dlp/utils.py
+++ b/yt_dlp/utils.py
@@ -70,6 +70,7 @@
 
 try:
     import certifi
+
     # The certificate may not be bundled in executable
     has_certifi = os.path.exists(certifi.where())
 except ImportError:
@@ -282,22 +283,16 @@ def write_json_file(obj, fn):
         if sys.platform == 'win32':
             # Need to remove existing file on Windows, else os.rename raises
             # WindowsError or FileExistsError.
-            try:
+            with contextlib.suppress(OSError):
                 os.unlink(fn)
-            except OSError:
-                pass
-        try:
+        with contextlib.suppress(OSError):
             mask = os.umask(0)
             os.umask(mask)
             os.chmod(tf.name, 0o666 & ~mask)
-        except OSError:
-            pass
         os.rename(tf.name, fn)
     except Exception:
-        try:
+        with contextlib.suppress(OSError):
             os.remove(tf.name)
-        except OSError:
-            pass
         raise
 
 
@@ -575,12 +570,9 @@ def extract_attributes(html_element):
     }.
     """
     parser = HTMLAttributeParser()
-    try:
+    with contextlib.suppress(compat_HTMLParseError):
         parser.feed(html_element)
         parser.close()
-    # Older Python may throw HTMLParseError in case of malformed HTML
-    except compat_HTMLParseError:
-        pass
     return parser.attrs
 
 
@@ -800,10 +792,8 @@ def _htmlentity_transform(entity_with_semicolon):
         else:
             base = 10
         # See https://github.com/ytdl-org/youtube-dl/issues/7518
-        try:
+        with contextlib.suppress(ValueError):
             return compat_chr(int(numstr, base))
-        except ValueError:
-            pass
 
     # Unknown entity in name, return its literal representation
     return '&%s;' % entity
@@ -812,7 +802,7 @@ def _htmlentity_transform(entity_with_semicolon):
 def unescapeHTML(s):
     if s is None:
         return None
-    assert type(s) == compat_str
+    assert isinstance(s, str)
 
     return re.sub(
         r'&([^&;]+;)', lambda m: _htmlentity_transform(m.group(1)), s)
@@ -865,7 +855,7 @@ def get_subprocess_encoding():
 
 
 def encodeFilename(s, for_subprocess=False):
-    assert type(s) == str
+    assert isinstance(s, str)
     return s
 
 
@@ -924,10 +914,8 @@ def _ssl_load_windows_store_certs(ssl_context, storename):
     except PermissionError:
         return
     for cert in certs:
-        try:
+        with contextlib.suppress(ssl.SSLError):
             ssl_context.load_verify_locations(cadata=cert)
-        except ssl.SSLError:
-            pass
 
 
 def make_HTTPS_handler(params, **kwargs):
@@ -1391,7 +1379,7 @@ class SocksConnection(base_class):
         def connect(self):
             self.sock = sockssocket()
             self.sock.setproxy(*proxy_args)
-            if type(self.timeout) in (int, float):
+            if isinstance(self.timeout, (int, float)):
                 self.sock.settimeout(self.timeout)
             self.sock.connect((self.host, self.port))
 
@@ -1526,9 +1514,7 @@ def prepare_line(line):
                 try:
                     cf.write(prepare_line(line))
                 except compat_cookiejar.LoadError as e:
-                    write_string(
-                        'WARNING: skipping cookie file entry due to %s: %r\n'
-                        % (e, line), sys.stderr)
+                    write_string(f'WARNING: skipping cookie file entry due to {e}: {line!r}\n')
                     continue
         cf.seek(0)
         self._really_load(cf, filename, ignore_discard, ignore_expires)
@@ -1646,12 +1632,10 @@ def parse_iso8601(date_str, delimiter='T', timezone=None):
     if timezone is None:
         timezone, date_str = extract_timezone(date_str)
 
-    try:
+    with contextlib.suppress(ValueError):
         date_format = f'%Y-%m-%d{delimiter}%H:%M:%S'
         dt = datetime.datetime.strptime(date_str, date_format) - timezone
         return calendar.timegm(dt.timetuple())
-    except ValueError:
-        pass
 
 
 def date_formats(day_first=True):
@@ -1671,17 +1655,13 @@ def unified_strdate(date_str, day_first=True):
     _, date_str = extract_timezone(date_str)
 
     for expression in date_formats(day_first):
-        try:
+        with contextlib.suppress(ValueError):
             upload_date = datetime.datetime.strptime(date_str, expression).strftime('%Y%m%d')
-        except ValueError:
-            pass
     if upload_date is None:
         timetuple = email.utils.parsedate_tz(date_str)
         if timetuple:
-            try:
+            with contextlib.suppress(ValueError):
                 upload_date = datetime.datetime(*timetuple[:6]).strftime('%Y%m%d')
-            except ValueError:
-                pass
     if upload_date is not None:
         return compat_str(upload_date)
 
@@ -1709,11 +1689,9 @@ def unified_timestamp(date_str, day_first=True):
         date_str = m.group(1)
 
     for expression in date_formats(day_first):
-        try:
+        with contextlib.suppress(ValueError):
             dt = datetime.datetime.strptime(date_str, expression) - timezone + datetime.timedelta(hours=pm_delta)
             return calendar.timegm(dt.timetuple())
-        except ValueError:
-            pass
     timetuple = email.utils.parsedate_tz(date_str)
     if timetuple:
         return calendar.timegm(timetuple) + pm_delta * 3600
@@ -1879,9 +1857,8 @@ def get_windows_version():
 
 
 def write_string(s, out=None, encoding=None):
-    if out is None:
-        out = sys.stderr
-    assert type(s) == compat_str
+    assert isinstance(s, str)
+    out = out or sys.stderr
 
     if 'b' in getattr(out, 'mode', ''):
         byt = s.encode(encoding or preferredencoding(), 'ignore')
@@ -2483,18 +2460,10 @@ def parse_duration(s):
             else:
                 return None
 
-    duration = 0
-    if secs:
-        duration += float(secs)
-    if mins:
-        duration += float(mins) * 60
-    if hours:
-        duration += float(hours) * 60 * 60
-    if days:
-        duration += float(days) * 24 * 60 * 60
     if ms:
-        duration += float(ms.replace(':', '.'))
-    return duration
+        ms = ms.replace(':', '.')
+    return sum(float(part or 0) * mult for part, mult in (
+        (days, 86400), (hours, 3600), (mins, 60), (secs, 1), (ms, 1)))
 
 
 def prepend_extension(filename, ext, expected_real_ext=None):
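
The chain of `if` blocks in parse_duration becomes a single weighted sum over (value, seconds-per-unit) pairs, with missing components counting as zero. A worked example; the component strings are illustrative regex captures:

    days, hours, mins, secs, ms = None, '1', '02', '03', '.5'
    duration = sum(float(part or 0) * mult for part, mult in (
        (days, 86400), (hours, 3600), (mins, 60), (secs, 1), (ms, 1)))
    assert duration == 3723.5
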
@@ -2957,9 +2926,10 @@ def encode_compat_str(string, encoding=preferredencoding(), errors='strict'):
 
 
 def parse_age_limit(s):
-    if type(s) == int:
+    # isinstance(False, int) is True. So type() must be used instead
+    if type(s) is int:
         return s if 0 <= s <= 21 else None
-    if not isinstance(s, str):
+    elif not isinstance(s, str):
         return None
     m = re.match(r'^(?P<age>\d{1,2})\+?$', s)
     if m:
@@ -3227,7 +3197,7 @@ def parse_codecs(codecs_str):
             if not tcodec:
                 tcodec = full_codec
         else:
-            write_string('WARNING: Unknown codec %s\n' % full_codec, sys.stderr)
+            write_string(f'WARNING: Unknown codec {full_codec}\n')
     if vcodec or acodec or tcodec:
         return {
             'vcodec': vcodec or 'none',
@@ -4934,7 +4904,7 @@ def get_executable_path():
 
 def load_plugins(name, suffix, namespace):
     classes = {}
-    try:
+    with contextlib.suppress(FileNotFoundError):
         plugins_spec = importlib.util.spec_from_file_location(
             name, os.path.join(get_executable_path(), 'ytdlp_plugins', name, '__init__.py'))
         plugins = importlib.util.module_from_spec(plugins_spec)
@@ -4947,8 +4917,6 @@ def load_plugins(name, suffix, namespace):
                 continue
             klass = getattr(plugins, name)
             classes[name] = namespace[name] = klass
-    except FileNotFoundError:
-        pass
     return classes
 
 
@@ -4957,13 +4925,14 @@ def traverse_obj(
         casesense=True, is_user_input=False, traverse_string=False):
     ''' Traverse nested list/dict/tuple
     @param path_list        A list of paths which are checked one by one.
-                            Each path is a list of keys where each key is a string,
-                            a function, a tuple of strings/None or "...".
-                            When a fuction is given, it takes the key and value as arguments
-                            and returns whether the key matches or not. When a tuple is given,
-                            all the keys given in the tuple are traversed, and
-                            "..." traverses all the keys in the object
-                            "None" returns the object without traversal
+                            Each path is a list of keys where each key is a:
+                              - None:     Do nothing
+                              - string:   A dictionary key
+                              - int:      An index into a list
+                              - tuple:    A list of keys all of which will be traversed
+                              - Ellipsis: Fetch all values in the object
+                              - Function: Takes the key and value as arguments
+                                          and returns whether the key matches or not
     @param default          Default value to return
     @param expected_type    Only accept final value of this type (Can also be any callable)
     @param get_all          Return all the values obtained from a path or only the first one
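
The docstring rewrite only restates the existing key kinds more precisely; the behaviour of traverse_obj is unchanged. A hedged usage sketch (return shapes assume the default get_all=True):

    info = {'items': [{'id': 'a', 'meta': {'title': 'A'}}, {'id': 'b'}]}
    traverse_obj(info, ('items', 0, 'meta', 'title'))    # 'A'  (strings and ints index in)
    traverse_obj(info, ('items', ..., 'id'))             # ['a', 'b']  (Ellipsis fans out)
    traverse_obj(info, ('items', 5, 'id'), default='?')  # '?'  (default on a failed path)
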
@@ -5253,7 +5222,7 @@ def all_args(self):
         yield from self.own_args or []
 
     def parse_args(self):
-        return self._parser.parse_args(list(self.all_args))
+        return self._parser.parse_args(self.all_args)
 
 
 class WebSocketsWrapper():
@@ -5339,3 +5308,7 @@ def __init__(self, f):
 
     def __get__(self, _, cls):
         return self.f(cls)
+
+
+def Namespace(**kwargs):
+    return collections.namedtuple('Namespace', kwargs)(**kwargs)
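
Namespace is the replacement for the removed Enum classes (YoutubeDL.Styles, FileDownloader's progress styles, MetadataParserPP.Actions). Being a namedtuple instance, it gives attribute access to the raw values, `in` tests membership against the values, and _asdict() recovers the mapping, which is exactly what the call sites above rely on:

    import collections

    def Namespace(**kwargs):
        return collections.namedtuple('Namespace', kwargs)(**kwargs)

    Styles = Namespace(HEADERS='yellow', ID='green')
    Styles.ID              # 'green'  (no `.value` indirection any more)
    'green' in Styles      # True     (membership is over the values)
    Styles._asdict()       # {'HEADERS': 'yellow', 'ID': 'green'}
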
index 3180eafde9d0452a35d89778a91a2474e5ae3037..741622b25b777ee75c5eb29da5059a88327ec610 100644 (file)
--- a/yt_dlp/webvtt.py
+++ b/yt_dlp/webvtt.py
@@ -103,14 +103,8 @@ def _parse_ts(ts):
     Convert a parsed WebVTT timestamp (a re.Match obtained from _REGEX_TS)
     into an MPEG PES timestamp: a tick counter at 90 kHz resolution.
     """
-
-    h, min, s, ms = ts.groups()
-    return 90 * (
-        int(h or 0) * 3600000 +  # noqa: W504,E221,E222
-        int(min)    *   60000 +  # noqa: W504,E221,E222
-        int(s)      *    1000 +  # noqa: W504,E221,E222
-        int(ms)                  # noqa: W504,E221,E222
-    )
+    return 90 * sum(
+        int(part or 0) * mult for part, mult in zip(ts.groups(), (3600_000, 60_000, 1000, 1)))
 
 
 def _format_ts(ts):
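
The manual millisecond arithmetic is replaced by the same weighted-sum idiom used in parse_duration; the multipliers are milliseconds per unit and the result is scaled to 90 kHz ticks. A worked example for the timestamp 01:02:03.500:

    groups = ('01', '02', '03', '500')   # hours, minutes, seconds, milliseconds
    ticks = 90 * sum(int(part or 0) * mult
                     for part, mult in zip(groups, (3600_000, 60_000, 1000, 1)))
    assert ticks == 90 * 3_723_500       # 3723.5 seconds in milliseconds, times 90
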