]> jfr.im git - yt-dlp.git/blame - youtube_dl/utils.py
Start moving to ytdl-org
[yt-dlp.git] / youtube_dl / utils.py
CommitLineData
d77c3dfd 1#!/usr/bin/env python
dcdb292f 2# coding: utf-8
d77c3dfd 3
ecc0c5ee
PH
4from __future__ import unicode_literals
5
1e399778 6import base64
5bc880b9 7import binascii
912b38b4 8import calendar
676eb3f2 9import codecs
62e609ab 10import contextlib
e3946f98 11import ctypes
c496ca96
PH
12import datetime
13import email.utils
0c265486 14import email.header
f45c185f 15import errno
be4a824d 16import functools
d77c3dfd 17import gzip
03f9daab 18import io
79a2e94e 19import itertools
f4bfd65f 20import json
d77c3dfd 21import locale
02dbf93f 22import math
347de493 23import operator
d77c3dfd 24import os
c496ca96 25import platform
773f291d 26import random
d77c3dfd 27import re
c496ca96 28import socket
79a2e94e 29import ssl
1c088fa8 30import subprocess
d77c3dfd 31import sys
181c8655 32import tempfile
01951dda 33import traceback
bcf89ce6 34import xml.etree.ElementTree
d77c3dfd 35import zlib
d77c3dfd 36
8c25f81b 37from .compat import (
b4a3d461 38 compat_HTMLParseError,
8bb56eee 39 compat_HTMLParser,
8f9312c3 40 compat_basestring,
8c25f81b 41 compat_chr,
1bab3437 42 compat_cookiejar,
d7cd9a9e 43 compat_ctypes_WINFUNCTYPE,
36e6f62c 44 compat_etree_fromstring,
51098426 45 compat_expanduser,
8c25f81b 46 compat_html_entities,
55b2f099 47 compat_html_entities_html5,
be4a824d 48 compat_http_client,
c86b6142 49 compat_kwargs,
efa97bdc 50 compat_os_name,
8c25f81b 51 compat_parse_qs,
702ccf2d 52 compat_shlex_quote,
8c25f81b 53 compat_str,
edaa23f8 54 compat_struct_pack,
d3f8e038 55 compat_struct_unpack,
8c25f81b
PH
56 compat_urllib_error,
57 compat_urllib_parse,
15707c7e 58 compat_urllib_parse_urlencode,
8c25f81b 59 compat_urllib_parse_urlparse,
7581bfc9 60 compat_urllib_parse_unquote_plus,
8c25f81b
PH
61 compat_urllib_request,
62 compat_urlparse,
810c10ba 63 compat_xpath,
8c25f81b 64)
4644ac55 65
71aff188
YCH
66from .socks import (
67 ProxyType,
68 sockssocket,
69)
70
4644ac55 71
51fb4995
YCH
72def register_socks_protocols():
73 # "Register" SOCKS protocols
d5ae6bb5
YCH
74 # In Python < 2.6.5, urlsplit() suffers from bug https://bugs.python.org/issue7904
75 # URLs with protocols not in urlparse.uses_netloc are not handled correctly
51fb4995
YCH
76 for scheme in ('socks', 'socks4', 'socks4a', 'socks5'):
77 if scheme not in compat_urlparse.uses_netloc:
78 compat_urlparse.uses_netloc.append(scheme)
79
80
468e2e92
FV
81# This is not clearly defined otherwise
82compiled_regex_type = type(re.compile(''))
83
3e669f36 84std_headers = {
60c08562 85 'User-Agent': 'Mozilla/5.0 (X11; Linux x86_64; rv:59.0) Gecko/20100101 Firefox/59.0',
59ae15a5
PH
86 'Accept-Charset': 'ISO-8859-1,utf-8;q=0.7,*;q=0.7',
87 'Accept': 'text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8',
88 'Accept-Encoding': 'gzip, deflate',
89 'Accept-Language': 'en-us,en;q=0.5',
3e669f36 90}
f427df17 91
5f6a1245 92
fb37eb25
S
93USER_AGENTS = {
94 'Safari': 'Mozilla/5.0 (X11; Linux x86_64; rv:10.0) AppleWebKit/533.20.25 (KHTML, like Gecko) Version/5.0.4 Safari/533.20.27',
95}
96
97
bf42a990
S
98NO_DEFAULT = object()
99
7105440c
YCH
100ENGLISH_MONTH_NAMES = [
101 'January', 'February', 'March', 'April', 'May', 'June',
102 'July', 'August', 'September', 'October', 'November', 'December']
103
f6717dec
S
104MONTH_NAMES = {
105 'en': ENGLISH_MONTH_NAMES,
106 'fr': [
3e4185c3
S
107 'janvier', 'février', 'mars', 'avril', 'mai', 'juin',
108 'juillet', 'août', 'septembre', 'octobre', 'novembre', 'décembre'],
f6717dec 109}
a942d6cb 110
a7aaa398
S
111KNOWN_EXTENSIONS = (
112 'mp4', 'm4a', 'm4p', 'm4b', 'm4r', 'm4v', 'aac',
113 'flv', 'f4v', 'f4a', 'f4b',
114 'webm', 'ogg', 'ogv', 'oga', 'ogx', 'spx', 'opus',
115 'mkv', 'mka', 'mk3d',
116 'avi', 'divx',
117 'mov',
118 'asf', 'wmv', 'wma',
119 '3gp', '3g2',
120 'mp3',
121 'flac',
122 'ape',
123 'wav',
124 'f4f', 'f4m', 'm3u8', 'smil')
125
c587cbb7 126# needed for sanitizing filenames in restricted mode
c8827027 127ACCENT_CHARS = dict(zip('ÂÃÄÀÁÅÆÇÈÉÊËÌÍÎÏÐÑÒÓÔÕÖŐØŒÙÚÛÜŰÝÞßàáâãäåæçèéêëìíîïðñòóôõöőøœùúûüűýþÿ',
128 itertools.chain('AAAAAA', ['AE'], 'CEEEEIIIIDNOOOOOOO', ['OE'], 'UUUUUYP', ['ss'],
129 'aaaaaa', ['ae'], 'ceeeeiiiionooooooo', ['oe'], 'uuuuuypy')))
c587cbb7 130
46f59e89
S
131DATE_FORMATS = (
132 '%d %B %Y',
133 '%d %b %Y',
134 '%B %d %Y',
cb655f34
S
135 '%B %dst %Y',
136 '%B %dnd %Y',
137 '%B %dth %Y',
46f59e89 138 '%b %d %Y',
cb655f34
S
139 '%b %dst %Y',
140 '%b %dnd %Y',
141 '%b %dth %Y',
46f59e89
S
142 '%b %dst %Y %I:%M',
143 '%b %dnd %Y %I:%M',
144 '%b %dth %Y %I:%M',
145 '%Y %m %d',
146 '%Y-%m-%d',
147 '%Y/%m/%d',
81c13222 148 '%Y/%m/%d %H:%M',
46f59e89 149 '%Y/%m/%d %H:%M:%S',
0c1c6f4b 150 '%Y-%m-%d %H:%M',
46f59e89
S
151 '%Y-%m-%d %H:%M:%S',
152 '%Y-%m-%d %H:%M:%S.%f',
153 '%d.%m.%Y %H:%M',
154 '%d.%m.%Y %H.%M',
155 '%Y-%m-%dT%H:%M:%SZ',
156 '%Y-%m-%dT%H:%M:%S.%fZ',
157 '%Y-%m-%dT%H:%M:%S.%f0Z',
158 '%Y-%m-%dT%H:%M:%S',
159 '%Y-%m-%dT%H:%M:%S.%f',
160 '%Y-%m-%dT%H:%M',
c6eed6b8
S
161 '%b %d %Y at %H:%M',
162 '%b %d %Y at %H:%M:%S',
b555ae9b
S
163 '%B %d %Y at %H:%M',
164 '%B %d %Y at %H:%M:%S',
46f59e89
S
165)
166
167DATE_FORMATS_DAY_FIRST = list(DATE_FORMATS)
168DATE_FORMATS_DAY_FIRST.extend([
169 '%d-%m-%Y',
170 '%d.%m.%Y',
171 '%d.%m.%y',
172 '%d/%m/%Y',
173 '%d/%m/%y',
174 '%d/%m/%Y %H:%M:%S',
175])
176
177DATE_FORMATS_MONTH_FIRST = list(DATE_FORMATS)
178DATE_FORMATS_MONTH_FIRST.extend([
179 '%m-%d-%Y',
180 '%m.%d.%Y',
181 '%m/%d/%Y',
182 '%m/%d/%y',
183 '%m/%d/%Y %H:%M:%S',
184])
185
06b3fe29 186PACKED_CODES_RE = r"}\('(.+)',(\d+),(\d+),'([^']+)'\.split\('\|'\)"
22f5f5c6 187JSON_LD_RE = r'(?is)<script[^>]+type=(["\']?)application/ld\+json\1[^>]*>(?P<json_ld>.+?)</script>'
06b3fe29 188
7105440c 189
d77c3dfd 190def preferredencoding():
59ae15a5 191 """Get preferred encoding.
d77c3dfd 192
59ae15a5
PH
193 Returns the best encoding scheme for the system, based on
194 locale.getpreferredencoding() and some further tweaks.
195 """
196 try:
197 pref = locale.getpreferredencoding()
28e614de 198 'TEST'.encode(pref)
70a1165b 199 except Exception:
59ae15a5 200 pref = 'UTF-8'
bae611f2 201
59ae15a5 202 return pref
d77c3dfd 203
f4bfd65f 204
181c8655 205def write_json_file(obj, fn):
1394646a 206 """ Encode obj as JSON and write it to fn, atomically if possible """
181c8655 207
92120217 208 fn = encodeFilename(fn)
61ee5aeb 209 if sys.version_info < (3, 0) and sys.platform != 'win32':
ec5f6016
JMF
210 encoding = get_filesystem_encoding()
211 # os.path.basename returns a bytes object, but NamedTemporaryFile
212 # will fail if the filename contains non ascii characters unless we
213 # use a unicode object
214 path_basename = lambda f: os.path.basename(fn).decode(encoding)
215 # the same for os.path.dirname
216 path_dirname = lambda f: os.path.dirname(fn).decode(encoding)
217 else:
218 path_basename = os.path.basename
219 path_dirname = os.path.dirname
220
73159f99
S
221 args = {
222 'suffix': '.tmp',
ec5f6016
JMF
223 'prefix': path_basename(fn) + '.',
224 'dir': path_dirname(fn),
73159f99
S
225 'delete': False,
226 }
227
181c8655
PH
228 # In Python 2.x, json.dump expects a bytestream.
229 # In Python 3.x, it writes to a character stream
230 if sys.version_info < (3, 0):
73159f99 231 args['mode'] = 'wb'
181c8655 232 else:
73159f99
S
233 args.update({
234 'mode': 'w',
235 'encoding': 'utf-8',
236 })
237
c86b6142 238 tf = tempfile.NamedTemporaryFile(**compat_kwargs(args))
181c8655
PH
239
240 try:
241 with tf:
242 json.dump(obj, tf)
1394646a
IK
243 if sys.platform == 'win32':
244 # Need to remove existing file on Windows, else os.rename raises
245 # WindowsError or FileExistsError.
246 try:
247 os.unlink(fn)
248 except OSError:
249 pass
181c8655 250 os.rename(tf.name, fn)
70a1165b 251 except Exception:
181c8655
PH
252 try:
253 os.remove(tf.name)
254 except OSError:
255 pass
256 raise
257
258
259if sys.version_info >= (2, 7):
ee114368 260 def find_xpath_attr(node, xpath, key, val=None):
59ae56fa 261 """ Find the xpath xpath[@key=val] """
5d2354f1 262 assert re.match(r'^[a-zA-Z_-]+$', key)
ee114368 263 expr = xpath + ('[@%s]' % key if val is None else "[@%s='%s']" % (key, val))
59ae56fa
PH
264 return node.find(expr)
265else:
ee114368 266 def find_xpath_attr(node, xpath, key, val=None):
810c10ba 267 for f in node.findall(compat_xpath(xpath)):
ee114368
S
268 if key not in f.attrib:
269 continue
270 if val is None or f.attrib.get(key) == val:
59ae56fa
PH
271 return f
272 return None
273
d7e66d39
JMF
274# On python2.6 the xml.etree.ElementTree.Element methods don't support
275# the namespace parameter
5f6a1245
JW
276
277
d7e66d39
JMF
278def xpath_with_ns(path, ns_map):
279 components = [c.split(':') for c in path.split('/')]
280 replaced = []
281 for c in components:
282 if len(c) == 1:
283 replaced.append(c[0])
284 else:
285 ns, tag = c
286 replaced.append('{%s}%s' % (ns_map[ns], tag))
287 return '/'.join(replaced)
288
d77c3dfd 289
a41fb80c 290def xpath_element(node, xpath, name=None, fatal=False, default=NO_DEFAULT):
578c0745 291 def _find_xpath(xpath):
810c10ba 292 return node.find(compat_xpath(xpath))
578c0745
S
293
294 if isinstance(xpath, (str, compat_str)):
295 n = _find_xpath(xpath)
296 else:
297 for xp in xpath:
298 n = _find_xpath(xp)
299 if n is not None:
300 break
d74bebd5 301
8e636da4 302 if n is None:
bf42a990
S
303 if default is not NO_DEFAULT:
304 return default
305 elif fatal:
bf0ff932
PH
306 name = xpath if name is None else name
307 raise ExtractorError('Could not find XML element %s' % name)
308 else:
309 return None
a41fb80c
S
310 return n
311
312
313def xpath_text(node, xpath, name=None, fatal=False, default=NO_DEFAULT):
8e636da4
S
314 n = xpath_element(node, xpath, name, fatal=fatal, default=default)
315 if n is None or n == default:
316 return n
317 if n.text is None:
318 if default is not NO_DEFAULT:
319 return default
320 elif fatal:
321 name = xpath if name is None else name
322 raise ExtractorError('Could not find XML element\'s text %s' % name)
323 else:
324 return None
325 return n.text
a41fb80c
S
326
327
328def xpath_attr(node, xpath, key, name=None, fatal=False, default=NO_DEFAULT):
329 n = find_xpath_attr(node, xpath, key)
330 if n is None:
331 if default is not NO_DEFAULT:
332 return default
333 elif fatal:
334 name = '%s[@%s]' % (xpath, key) if name is None else name
335 raise ExtractorError('Could not find XML attribute %s' % name)
336 else:
337 return None
338 return n.attrib[key]
bf0ff932
PH
339
340
9e6dd238 341def get_element_by_id(id, html):
43e8fafd 342 """Return the content of the tag with the specified ID in the passed HTML document"""
611c1dd9 343 return get_element_by_attribute('id', id, html)
43e8fafd 344
12ea2f30 345
84c237fb 346def get_element_by_class(class_name, html):
2af12ad9
TC
347 """Return the content of the first tag with the specified class in the passed HTML document"""
348 retval = get_elements_by_class(class_name, html)
349 return retval[0] if retval else None
350
351
352def get_element_by_attribute(attribute, value, html, escape_value=True):
353 retval = get_elements_by_attribute(attribute, value, html, escape_value)
354 return retval[0] if retval else None
355
356
357def get_elements_by_class(class_name, html):
358 """Return the content of all tags with the specified class in the passed HTML document as a list"""
359 return get_elements_by_attribute(
84c237fb
YCH
360 'class', r'[^\'"]*\b%s\b[^\'"]*' % re.escape(class_name),
361 html, escape_value=False)
362
363
2af12ad9 364def get_elements_by_attribute(attribute, value, html, escape_value=True):
43e8fafd 365 """Return the content of the tag with the specified attribute in the passed HTML document"""
9e6dd238 366
84c237fb
YCH
367 value = re.escape(value) if escape_value else value
368
2af12ad9
TC
369 retlist = []
370 for m in re.finditer(r'''(?xs)
38285056 371 <([a-zA-Z0-9:._-]+)
609ff8ca 372 (?:\s+[a-zA-Z0-9:._-]+(?:=[a-zA-Z0-9:._-]*|="[^"]*"|='[^']*'|))*?
38285056 373 \s+%s=['"]?%s['"]?
609ff8ca 374 (?:\s+[a-zA-Z0-9:._-]+(?:=[a-zA-Z0-9:._-]*|="[^"]*"|='[^']*'|))*?
38285056
PH
375 \s*>
376 (?P<content>.*?)
377 </\1>
2af12ad9
TC
378 ''' % (re.escape(attribute), value), html):
379 res = m.group('content')
38285056 380
2af12ad9
TC
381 if res.startswith('"') or res.startswith("'"):
382 res = res[1:-1]
38285056 383
2af12ad9 384 retlist.append(unescapeHTML(res))
a921f407 385
2af12ad9 386 return retlist
a921f407 387
c5229f39 388
8bb56eee
BF
389class HTMLAttributeParser(compat_HTMLParser):
390 """Trivial HTML parser to gather the attributes for a single element"""
391 def __init__(self):
c5229f39 392 self.attrs = {}
8bb56eee
BF
393 compat_HTMLParser.__init__(self)
394
395 def handle_starttag(self, tag, attrs):
396 self.attrs = dict(attrs)
397
c5229f39 398
8bb56eee
BF
399def extract_attributes(html_element):
400 """Given a string for an HTML element such as
401 <el
402 a="foo" B="bar" c="&98;az" d=boz
403 empty= noval entity="&amp;"
404 sq='"' dq="'"
405 >
406 Decode and return a dictionary of attributes.
407 {
408 'a': 'foo', 'b': 'bar', c: 'baz', d: 'boz',
409 'empty': '', 'noval': None, 'entity': '&',
410 'sq': '"', 'dq': '\''
411 }.
412 NB HTMLParser is stricter in Python 2.6 & 3.2 than in later versions,
413 but the cases in the unit test will work for all of 2.6, 2.7, 3.2-3.5.
414 """
415 parser = HTMLAttributeParser()
b4a3d461
S
416 try:
417 parser.feed(html_element)
418 parser.close()
419 # Older Python may throw HTMLParseError in case of malformed HTML
420 except compat_HTMLParseError:
421 pass
8bb56eee 422 return parser.attrs
9e6dd238 423
c5229f39 424
9e6dd238 425def clean_html(html):
59ae15a5 426 """Clean an HTML snippet into a readable string"""
dd622d7c
PH
427
428 if html is None: # Convenience for sanitizing descriptions etc.
429 return html
430
59ae15a5
PH
431 # Newline vs <br />
432 html = html.replace('\n', ' ')
edd9221c
TF
433 html = re.sub(r'(?u)\s*<\s*br\s*/?\s*>\s*', '\n', html)
434 html = re.sub(r'(?u)<\s*/\s*p\s*>\s*<\s*p[^>]*>', '\n', html)
59ae15a5
PH
435 # Strip html tags
436 html = re.sub('<.*?>', '', html)
437 # Replace html entities
438 html = unescapeHTML(html)
7decf895 439 return html.strip()
9e6dd238
FV
440
441
d77c3dfd 442def sanitize_open(filename, open_mode):
59ae15a5
PH
443 """Try to open the given filename, and slightly tweak it if this fails.
444
445 Attempts to open the given filename. If this fails, it tries to change
446 the filename slightly, step by step, until it's either able to open it
447 or it fails and raises a final exception, like the standard open()
448 function.
449
450 It returns the tuple (stream, definitive_file_name).
451 """
452 try:
28e614de 453 if filename == '-':
59ae15a5
PH
454 if sys.platform == 'win32':
455 import msvcrt
456 msvcrt.setmode(sys.stdout.fileno(), os.O_BINARY)
898280a0 457 return (sys.stdout.buffer if hasattr(sys.stdout, 'buffer') else sys.stdout, filename)
59ae15a5
PH
458 stream = open(encodeFilename(filename), open_mode)
459 return (stream, filename)
460 except (IOError, OSError) as err:
f45c185f
PH
461 if err.errno in (errno.EACCES,):
462 raise
59ae15a5 463
f45c185f 464 # In case of error, try to remove win32 forbidden chars
d55de57b 465 alt_filename = sanitize_path(filename)
f45c185f
PH
466 if alt_filename == filename:
467 raise
468 else:
469 # An exception here should be caught in the caller
d55de57b 470 stream = open(encodeFilename(alt_filename), open_mode)
f45c185f 471 return (stream, alt_filename)
d77c3dfd
FV
472
473
474def timeconvert(timestr):
59ae15a5
PH
475 """Convert RFC 2822 defined time string into system timestamp"""
476 timestamp = None
477 timetuple = email.utils.parsedate_tz(timestr)
478 if timetuple is not None:
479 timestamp = email.utils.mktime_tz(timetuple)
480 return timestamp
1c469a94 481
5f6a1245 482
796173d0 483def sanitize_filename(s, restricted=False, is_id=False):
59ae15a5
PH
484 """Sanitizes a string so it could be used as part of a filename.
485 If restricted is set, use a stricter subset of allowed characters.
158af524
S
486 Set is_id if this is not an arbitrary string, but an ID that should be kept
487 if possible.
59ae15a5
PH
488 """
489 def replace_insane(char):
c587cbb7
AT
490 if restricted and char in ACCENT_CHARS:
491 return ACCENT_CHARS[char]
59ae15a5
PH
492 if char == '?' or ord(char) < 32 or ord(char) == 127:
493 return ''
494 elif char == '"':
495 return '' if restricted else '\''
496 elif char == ':':
497 return '_-' if restricted else ' -'
498 elif char in '\\/|*<>':
499 return '_'
627dcfff 500 if restricted and (char in '!&\'()[]{}$;`^,#' or char.isspace()):
59ae15a5
PH
501 return '_'
502 if restricted and ord(char) > 127:
503 return '_'
504 return char
505
2aeb06d6
PH
506 # Handle timestamps
507 s = re.sub(r'[0-9]+(?::[0-9]+)+', lambda m: m.group(0).replace(':', '_'), s)
28e614de 508 result = ''.join(map(replace_insane, s))
796173d0
PH
509 if not is_id:
510 while '__' in result:
511 result = result.replace('__', '_')
512 result = result.strip('_')
513 # Common case of "Foreign band name - English song title"
514 if restricted and result.startswith('-_'):
515 result = result[2:]
5a42414b
PH
516 if result.startswith('-'):
517 result = '_' + result[len('-'):]
a7440261 518 result = result.lstrip('.')
796173d0
PH
519 if not result:
520 result = '_'
59ae15a5 521 return result
d77c3dfd 522
5f6a1245 523
a2aaf4db
S
524def sanitize_path(s):
525 """Sanitizes and normalizes path on Windows"""
526 if sys.platform != 'win32':
527 return s
be531ef1
S
528 drive_or_unc, _ = os.path.splitdrive(s)
529 if sys.version_info < (2, 7) and not drive_or_unc:
530 drive_or_unc, _ = os.path.splitunc(s)
531 norm_path = os.path.normpath(remove_start(s, drive_or_unc)).split(os.path.sep)
532 if drive_or_unc:
a2aaf4db
S
533 norm_path.pop(0)
534 sanitized_path = [
ec85ded8 535 path_part if path_part in ['.', '..'] else re.sub(r'(?:[/<>:"\|\\?\*]|[\s.]$)', '#', path_part)
a2aaf4db 536 for path_part in norm_path]
be531ef1
S
537 if drive_or_unc:
538 sanitized_path.insert(0, drive_or_unc + os.path.sep)
a2aaf4db
S
539 return os.path.join(*sanitized_path)
540
541
17bcc626 542def sanitize_url(url):
befa4708
S
543 # Prepend protocol-less URLs with `http:` scheme in order to mitigate
544 # the number of unwanted failures due to missing protocol
545 if url.startswith('//'):
546 return 'http:%s' % url
547 # Fix some common typos seen so far
548 COMMON_TYPOS = (
067aa17e 549 # https://github.com/ytdl-org/youtube-dl/issues/15649
befa4708
S
550 (r'^httpss://', r'https://'),
551 # https://bx1.be/lives/direct-tv/
552 (r'^rmtp([es]?)://', r'rtmp\1://'),
553 )
554 for mistake, fixup in COMMON_TYPOS:
555 if re.match(mistake, url):
556 return re.sub(mistake, fixup, url)
557 return url
17bcc626
S
558
559
67dda517 560def sanitized_Request(url, *args, **kwargs):
17bcc626 561 return compat_urllib_request.Request(sanitize_url(url), *args, **kwargs)
67dda517
S
562
563
51098426
S
564def expand_path(s):
565 """Expand shell variables and ~"""
566 return os.path.expandvars(compat_expanduser(s))
567
568
d77c3dfd 569def orderedSet(iterable):
59ae15a5
PH
570 """ Remove all duplicates from the input iterable """
571 res = []
572 for el in iterable:
573 if el not in res:
574 res.append(el)
575 return res
d77c3dfd 576
912b38b4 577
55b2f099 578def _htmlentity_transform(entity_with_semicolon):
4e408e47 579 """Transforms an HTML entity to a character."""
55b2f099
YCH
580 entity = entity_with_semicolon[:-1]
581
4e408e47
PH
582 # Known non-numeric HTML entity
583 if entity in compat_html_entities.name2codepoint:
584 return compat_chr(compat_html_entities.name2codepoint[entity])
585
55b2f099
YCH
586 # TODO: HTML5 allows entities without a semicolon. For example,
587 # '&Eacuteric' should be decoded as 'Éric'.
588 if entity_with_semicolon in compat_html_entities_html5:
589 return compat_html_entities_html5[entity_with_semicolon]
590
91757b0f 591 mobj = re.match(r'#(x[0-9a-fA-F]+|[0-9]+)', entity)
4e408e47
PH
592 if mobj is not None:
593 numstr = mobj.group(1)
28e614de 594 if numstr.startswith('x'):
4e408e47 595 base = 16
28e614de 596 numstr = '0%s' % numstr
4e408e47
PH
597 else:
598 base = 10
067aa17e 599 # See https://github.com/ytdl-org/youtube-dl/issues/7518
7aefc49c
S
600 try:
601 return compat_chr(int(numstr, base))
602 except ValueError:
603 pass
4e408e47
PH
604
605 # Unknown entity in name, return its literal representation
7a3f0c00 606 return '&%s;' % entity
4e408e47
PH
607
608
d77c3dfd 609def unescapeHTML(s):
912b38b4
PH
610 if s is None:
611 return None
612 assert type(s) == compat_str
d77c3dfd 613
4e408e47 614 return re.sub(
95f3f7c2 615 r'&([^&;]+;)', lambda m: _htmlentity_transform(m.group(1)), s)
d77c3dfd 616
8bf48f23 617
aa49acd1
S
618def get_subprocess_encoding():
619 if sys.platform == 'win32' and sys.getwindowsversion()[0] >= 5:
620 # For subprocess calls, encode with locale encoding
621 # Refer to http://stackoverflow.com/a/9951851/35070
622 encoding = preferredencoding()
623 else:
624 encoding = sys.getfilesystemencoding()
625 if encoding is None:
626 encoding = 'utf-8'
627 return encoding
628
629
8bf48f23 630def encodeFilename(s, for_subprocess=False):
59ae15a5
PH
631 """
632 @param s The name of the file
633 """
d77c3dfd 634
8bf48f23 635 assert type(s) == compat_str
d77c3dfd 636
59ae15a5
PH
637 # Python 3 has a Unicode API
638 if sys.version_info >= (3, 0):
639 return s
0f00efed 640
aa49acd1
S
641 # Pass '' directly to use Unicode APIs on Windows 2000 and up
642 # (Detecting Windows NT 4 is tricky because 'major >= 4' would
643 # match Windows 9x series as well. Besides, NT 4 is obsolete.)
644 if not for_subprocess and sys.platform == 'win32' and sys.getwindowsversion()[0] >= 5:
645 return s
646
8ee239e9
YCH
647 # Jython assumes filenames are Unicode strings though reported as Python 2.x compatible
648 if sys.platform.startswith('java'):
649 return s
650
aa49acd1
S
651 return s.encode(get_subprocess_encoding(), 'ignore')
652
653
654def decodeFilename(b, for_subprocess=False):
655
656 if sys.version_info >= (3, 0):
657 return b
658
659 if not isinstance(b, bytes):
660 return b
661
662 return b.decode(get_subprocess_encoding(), 'ignore')
8bf48f23 663
f07b74fc
PH
664
665def encodeArgument(s):
666 if not isinstance(s, compat_str):
667 # Legacy code that uses byte strings
668 # Uncomment the following line after fixing all post processors
7af808a5 669 # assert False, 'Internal error: %r should be of type %r, is %r' % (s, compat_str, type(s))
f07b74fc
PH
670 s = s.decode('ascii')
671 return encodeFilename(s, True)
672
673
aa49acd1
S
674def decodeArgument(b):
675 return decodeFilename(b, True)
676
677
8271226a
PH
678def decodeOption(optval):
679 if optval is None:
680 return optval
681 if isinstance(optval, bytes):
682 optval = optval.decode(preferredencoding())
683
684 assert isinstance(optval, compat_str)
685 return optval
1c256f70 686
5f6a1245 687
4539dd30
PH
688def formatSeconds(secs):
689 if secs > 3600:
690 return '%d:%02d:%02d' % (secs // 3600, (secs % 3600) // 60, secs % 60)
691 elif secs > 60:
692 return '%d:%02d' % (secs // 60, secs % 60)
693 else:
694 return '%d' % secs
695
a0ddb8a2 696
be4a824d
PH
697def make_HTTPS_handler(params, **kwargs):
698 opts_no_check_certificate = params.get('nocheckcertificate', False)
0db261ba 699 if hasattr(ssl, 'create_default_context'): # Python >= 3.4 or 2.7.9
be5f2c19 700 context = ssl.create_default_context(ssl.Purpose.SERVER_AUTH)
0db261ba 701 if opts_no_check_certificate:
be5f2c19 702 context.check_hostname = False
0db261ba 703 context.verify_mode = ssl.CERT_NONE
a2366922 704 try:
be4a824d 705 return YoutubeDLHTTPSHandler(params, context=context, **kwargs)
a2366922
PH
706 except TypeError:
707 # Python 2.7.8
708 # (create_default_context present but HTTPSHandler has no context=)
709 pass
710
711 if sys.version_info < (3, 2):
d7932313 712 return YoutubeDLHTTPSHandler(params, **kwargs)
aa37e3d4 713 else: # Python < 3.4
d7932313 714 context = ssl.SSLContext(ssl.PROTOCOL_TLSv1)
ea6d901e 715 context.verify_mode = (ssl.CERT_NONE
dca08720 716 if opts_no_check_certificate
ea6d901e 717 else ssl.CERT_REQUIRED)
303b479e 718 context.set_default_verify_paths()
be4a824d 719 return YoutubeDLHTTPSHandler(params, context=context, **kwargs)
ea6d901e 720
732ea2f0 721
08f2a92c
JMF
722def bug_reports_message():
723 if ytdl_is_updateable():
724 update_cmd = 'type youtube-dl -U to update'
725 else:
726 update_cmd = 'see https://yt-dl.org/update on how to update'
727 msg = '; please report this issue on https://yt-dl.org/bug .'
728 msg += ' Make sure you are using the latest version; %s.' % update_cmd
729 msg += ' Be sure to call youtube-dl with the --verbose flag and include its complete output.'
730 return msg
731
732
bf5b9d85
PM
733class YoutubeDLError(Exception):
734 """Base exception for YoutubeDL errors."""
735 pass
736
737
738class ExtractorError(YoutubeDLError):
1c256f70 739 """Error during info extraction."""
5f6a1245 740
d11271dd 741 def __init__(self, msg, tb=None, expected=False, cause=None, video_id=None):
9a82b238
PH
742 """ tb, if given, is the original traceback (so that it can be printed out).
743 If expected is set, this is a normal error message and most likely not a bug in youtube-dl.
744 """
745
746 if sys.exc_info()[0] in (compat_urllib_error.URLError, socket.timeout, UnavailableVideoError):
747 expected = True
d11271dd
PH
748 if video_id is not None:
749 msg = video_id + ': ' + msg
410f3e73 750 if cause:
28e614de 751 msg += ' (caused by %r)' % cause
9a82b238 752 if not expected:
08f2a92c 753 msg += bug_reports_message()
1c256f70 754 super(ExtractorError, self).__init__(msg)
d5979c5d 755
1c256f70 756 self.traceback = tb
8cc83b8d 757 self.exc_info = sys.exc_info() # preserve original exception
2eabb802 758 self.cause = cause
d11271dd 759 self.video_id = video_id
1c256f70 760
01951dda
PH
761 def format_traceback(self):
762 if self.traceback is None:
763 return None
28e614de 764 return ''.join(traceback.format_tb(self.traceback))
01951dda 765
1c256f70 766
416c7fcb
PH
767class UnsupportedError(ExtractorError):
768 def __init__(self, url):
769 super(UnsupportedError, self).__init__(
770 'Unsupported URL: %s' % url, expected=True)
771 self.url = url
772
773
55b3e45b
JMF
774class RegexNotFoundError(ExtractorError):
775 """Error when a regex didn't match"""
776 pass
777
778
773f291d
S
779class GeoRestrictedError(ExtractorError):
780 """Geographic restriction Error exception.
781
782 This exception may be thrown when a video is not available from your
783 geographic location due to geographic restrictions imposed by a website.
784 """
785 def __init__(self, msg, countries=None):
786 super(GeoRestrictedError, self).__init__(msg, expected=True)
787 self.msg = msg
788 self.countries = countries
789
790
bf5b9d85 791class DownloadError(YoutubeDLError):
59ae15a5 792 """Download Error exception.
d77c3dfd 793
59ae15a5
PH
794 This exception may be thrown by FileDownloader objects if they are not
795 configured to continue on errors. They will contain the appropriate
796 error message.
797 """
5f6a1245 798
8cc83b8d
FV
799 def __init__(self, msg, exc_info=None):
800 """ exc_info, if given, is the original exception that caused the trouble (as returned by sys.exc_info()). """
801 super(DownloadError, self).__init__(msg)
802 self.exc_info = exc_info
d77c3dfd
FV
803
804
bf5b9d85 805class SameFileError(YoutubeDLError):
59ae15a5 806 """Same File exception.
d77c3dfd 807
59ae15a5
PH
808 This exception will be thrown by FileDownloader objects if they detect
809 multiple files would have to be downloaded to the same file on disk.
810 """
811 pass
d77c3dfd
FV
812
813
bf5b9d85 814class PostProcessingError(YoutubeDLError):
59ae15a5 815 """Post Processing exception.
d77c3dfd 816
59ae15a5
PH
817 This exception may be raised by PostProcessor's .run() method to
818 indicate an error in the postprocessing task.
819 """
5f6a1245 820
7851b379 821 def __init__(self, msg):
bf5b9d85 822 super(PostProcessingError, self).__init__(msg)
7851b379 823 self.msg = msg
d77c3dfd 824
5f6a1245 825
bf5b9d85 826class MaxDownloadsReached(YoutubeDLError):
59ae15a5
PH
827 """ --max-downloads limit has been reached. """
828 pass
d77c3dfd
FV
829
830
bf5b9d85 831class UnavailableVideoError(YoutubeDLError):
59ae15a5 832 """Unavailable Format exception.
d77c3dfd 833
59ae15a5
PH
834 This exception will be thrown when a video is requested
835 in a format that is not available for that video.
836 """
837 pass
d77c3dfd
FV
838
839
bf5b9d85 840class ContentTooShortError(YoutubeDLError):
59ae15a5 841 """Content Too Short exception.
d77c3dfd 842
59ae15a5
PH
843 This exception may be raised by FileDownloader objects when a file they
844 download is too small for what the server announced first, indicating
845 the connection was probably interrupted.
846 """
d77c3dfd 847
59ae15a5 848 def __init__(self, downloaded, expected):
bf5b9d85
PM
849 super(ContentTooShortError, self).__init__(
850 'Downloaded {0} bytes, expected {1} bytes'.format(downloaded, expected)
851 )
2c7ed247 852 # Both in bytes
59ae15a5
PH
853 self.downloaded = downloaded
854 self.expected = expected
d77c3dfd 855
5f6a1245 856
bf5b9d85 857class XAttrMetadataError(YoutubeDLError):
efa97bdc
YCH
858 def __init__(self, code=None, msg='Unknown error'):
859 super(XAttrMetadataError, self).__init__(msg)
860 self.code = code
bd264412 861 self.msg = msg
efa97bdc
YCH
862
863 # Parsing code and msg
864 if (self.code in (errno.ENOSPC, errno.EDQUOT) or
865 'No space left' in self.msg or 'Disk quota excedded' in self.msg):
866 self.reason = 'NO_SPACE'
867 elif self.code == errno.E2BIG or 'Argument list too long' in self.msg:
868 self.reason = 'VALUE_TOO_LONG'
869 else:
870 self.reason = 'NOT_SUPPORTED'
871
872
bf5b9d85 873class XAttrUnavailableError(YoutubeDLError):
efa97bdc
YCH
874 pass
875
876
c5a59d93 877def _create_http_connection(ydl_handler, http_class, is_https, *args, **kwargs):
e5e78797
S
878 # Working around python 2 bug (see http://bugs.python.org/issue17849) by limiting
879 # expected HTTP responses to meet HTTP/1.0 or later (see also
067aa17e 880 # https://github.com/ytdl-org/youtube-dl/issues/6727)
e5e78797 881 if sys.version_info < (3, 0):
65220c3b
S
882 kwargs['strict'] = True
883 hc = http_class(*args, **compat_kwargs(kwargs))
be4a824d 884 source_address = ydl_handler._params.get('source_address')
8959018a 885
be4a824d 886 if source_address is not None:
8959018a
AU
887 # This is to workaround _create_connection() from socket where it will try all
888 # address data from getaddrinfo() including IPv6. This filters the result from
889 # getaddrinfo() based on the source_address value.
890 # This is based on the cpython socket.create_connection() function.
891 # https://github.com/python/cpython/blob/master/Lib/socket.py#L691
892 def _create_connection(address, timeout=socket._GLOBAL_DEFAULT_TIMEOUT, source_address=None):
893 host, port = address
894 err = None
895 addrs = socket.getaddrinfo(host, port, 0, socket.SOCK_STREAM)
9e21e6d9
S
896 af = socket.AF_INET if '.' in source_address[0] else socket.AF_INET6
897 ip_addrs = [addr for addr in addrs if addr[0] == af]
898 if addrs and not ip_addrs:
899 ip_version = 'v4' if af == socket.AF_INET else 'v6'
900 raise socket.error(
901 "No remote IP%s addresses available for connect, can't use '%s' as source address"
902 % (ip_version, source_address[0]))
8959018a
AU
903 for res in ip_addrs:
904 af, socktype, proto, canonname, sa = res
905 sock = None
906 try:
907 sock = socket.socket(af, socktype, proto)
908 if timeout is not socket._GLOBAL_DEFAULT_TIMEOUT:
909 sock.settimeout(timeout)
910 sock.bind(source_address)
911 sock.connect(sa)
912 err = None # Explicitly break reference cycle
913 return sock
914 except socket.error as _:
915 err = _
916 if sock is not None:
917 sock.close()
918 if err is not None:
919 raise err
920 else:
9e21e6d9
S
921 raise socket.error('getaddrinfo returns an empty list')
922 if hasattr(hc, '_create_connection'):
923 hc._create_connection = _create_connection
be4a824d
PH
924 sa = (source_address, 0)
925 if hasattr(hc, 'source_address'): # Python 2.7+
926 hc.source_address = sa
927 else: # Python 2.6
928 def _hc_connect(self, *args, **kwargs):
9e21e6d9 929 sock = _create_connection(
be4a824d
PH
930 (self.host, self.port), self.timeout, sa)
931 if is_https:
d7932313
PH
932 self.sock = ssl.wrap_socket(
933 sock, self.key_file, self.cert_file,
934 ssl_version=ssl.PROTOCOL_TLSv1)
be4a824d
PH
935 else:
936 self.sock = sock
937 hc.connect = functools.partial(_hc_connect, hc)
938
939 return hc
940
941
87f0e62d 942def handle_youtubedl_headers(headers):
992fc9d6
YCH
943 filtered_headers = headers
944
945 if 'Youtubedl-no-compression' in filtered_headers:
946 filtered_headers = dict((k, v) for k, v in filtered_headers.items() if k.lower() != 'accept-encoding')
87f0e62d 947 del filtered_headers['Youtubedl-no-compression']
87f0e62d 948
992fc9d6 949 return filtered_headers
87f0e62d
YCH
950
951
acebc9cd 952class YoutubeDLHandler(compat_urllib_request.HTTPHandler):
59ae15a5
PH
953 """Handler for HTTP requests and responses.
954
955 This class, when installed with an OpenerDirector, automatically adds
956 the standard headers to every HTTP request and handles gzipped and
957 deflated responses from web servers. If compression is to be avoided in
958 a particular request, the original request in the program code only has
0424ec30 959 to include the HTTP header "Youtubedl-no-compression", which will be
59ae15a5
PH
960 removed before making the real request.
961
962 Part of this code was copied from:
963
964 http://techknack.net/python-urllib2-handlers/
965
966 Andrew Rowls, the author of that code, agreed to release it to the
967 public domain.
968 """
969
be4a824d
PH
970 def __init__(self, params, *args, **kwargs):
971 compat_urllib_request.HTTPHandler.__init__(self, *args, **kwargs)
972 self._params = params
973
974 def http_open(self, req):
71aff188
YCH
975 conn_class = compat_http_client.HTTPConnection
976
977 socks_proxy = req.headers.get('Ytdl-socks-proxy')
978 if socks_proxy:
979 conn_class = make_socks_conn_class(conn_class, socks_proxy)
980 del req.headers['Ytdl-socks-proxy']
981
be4a824d 982 return self.do_open(functools.partial(
71aff188 983 _create_http_connection, self, conn_class, False),
be4a824d
PH
984 req)
985
59ae15a5
PH
986 @staticmethod
987 def deflate(data):
988 try:
989 return zlib.decompress(data, -zlib.MAX_WBITS)
990 except zlib.error:
991 return zlib.decompress(data)
992
acebc9cd 993 def http_request(self, req):
51f267d9
S
994 # According to RFC 3986, URLs can not contain non-ASCII characters, however this is not
995 # always respected by websites, some tend to give out URLs with non percent-encoded
996 # non-ASCII characters (see telemb.py, ard.py [#3412])
997 # urllib chokes on URLs with non-ASCII characters (see http://bugs.python.org/issue3991)
998 # To work around aforementioned issue we will replace request's original URL with
999 # percent-encoded one
1000 # Since redirects are also affected (e.g. http://www.southpark.de/alle-episoden/s18e09)
1001 # the code of this workaround has been moved here from YoutubeDL.urlopen()
1002 url = req.get_full_url()
1003 url_escaped = escape_url(url)
1004
1005 # Substitute URL if any change after escaping
1006 if url != url_escaped:
15d260eb 1007 req = update_Request(req, url=url_escaped)
51f267d9 1008
33ac271b 1009 for h, v in std_headers.items():
3d5f7a39
JK
1010 # Capitalize is needed because of Python bug 2275: http://bugs.python.org/issue2275
1011 # The dict keys are capitalized because of this bug by urllib
1012 if h.capitalize() not in req.headers:
33ac271b 1013 req.add_header(h, v)
87f0e62d
YCH
1014
1015 req.headers = handle_youtubedl_headers(req.headers)
989b4b2b
PH
1016
1017 if sys.version_info < (2, 7) and '#' in req.get_full_url():
1018 # Python 2.6 is brain-dead when it comes to fragments
1019 req._Request__original = req._Request__original.partition('#')[0]
1020 req._Request__r_type = req._Request__r_type.partition('#')[0]
1021
59ae15a5
PH
1022 return req
1023
acebc9cd 1024 def http_response(self, req, resp):
59ae15a5
PH
1025 old_resp = resp
1026 # gzip
1027 if resp.headers.get('Content-encoding', '') == 'gzip':
aa3e9507
PH
1028 content = resp.read()
1029 gz = gzip.GzipFile(fileobj=io.BytesIO(content), mode='rb')
1030 try:
1031 uncompressed = io.BytesIO(gz.read())
1032 except IOError as original_ioerror:
1033 # There may be junk add the end of the file
1034 # See http://stackoverflow.com/q/4928560/35070 for details
1035 for i in range(1, 1024):
1036 try:
1037 gz = gzip.GzipFile(fileobj=io.BytesIO(content[:-i]), mode='rb')
1038 uncompressed = io.BytesIO(gz.read())
1039 except IOError:
1040 continue
1041 break
1042 else:
1043 raise original_ioerror
b407d853 1044 resp = compat_urllib_request.addinfourl(uncompressed, old_resp.headers, old_resp.url, old_resp.code)
59ae15a5 1045 resp.msg = old_resp.msg
c047270c 1046 del resp.headers['Content-encoding']
59ae15a5
PH
1047 # deflate
1048 if resp.headers.get('Content-encoding', '') == 'deflate':
1049 gz = io.BytesIO(self.deflate(resp.read()))
b407d853 1050 resp = compat_urllib_request.addinfourl(gz, old_resp.headers, old_resp.url, old_resp.code)
59ae15a5 1051 resp.msg = old_resp.msg
c047270c 1052 del resp.headers['Content-encoding']
ad729172 1053 # Percent-encode redirect URL of Location HTTP header to satisfy RFC 3986 (see
067aa17e 1054 # https://github.com/ytdl-org/youtube-dl/issues/6457).
5a4d9ddb
S
1055 if 300 <= resp.code < 400:
1056 location = resp.headers.get('Location')
1057 if location:
1058 # As of RFC 2616 default charset is iso-8859-1 that is respected by python 3
1059 if sys.version_info >= (3, 0):
1060 location = location.encode('iso-8859-1').decode('utf-8')
0ea59007
YCH
1061 else:
1062 location = location.decode('utf-8')
5a4d9ddb
S
1063 location_escaped = escape_url(location)
1064 if location != location_escaped:
1065 del resp.headers['Location']
9a4aec8b
YCH
1066 if sys.version_info < (3, 0):
1067 location_escaped = location_escaped.encode('utf-8')
5a4d9ddb 1068 resp.headers['Location'] = location_escaped
59ae15a5 1069 return resp
0f8d03f8 1070
acebc9cd
PH
1071 https_request = http_request
1072 https_response = http_response
bf50b038 1073
5de90176 1074
71aff188
YCH
1075def make_socks_conn_class(base_class, socks_proxy):
1076 assert issubclass(base_class, (
1077 compat_http_client.HTTPConnection, compat_http_client.HTTPSConnection))
1078
1079 url_components = compat_urlparse.urlparse(socks_proxy)
1080 if url_components.scheme.lower() == 'socks5':
1081 socks_type = ProxyType.SOCKS5
1082 elif url_components.scheme.lower() in ('socks', 'socks4'):
1083 socks_type = ProxyType.SOCKS4
51fb4995
YCH
1084 elif url_components.scheme.lower() == 'socks4a':
1085 socks_type = ProxyType.SOCKS4A
71aff188 1086
cdd94c2e
YCH
1087 def unquote_if_non_empty(s):
1088 if not s:
1089 return s
1090 return compat_urllib_parse_unquote_plus(s)
1091
71aff188
YCH
1092 proxy_args = (
1093 socks_type,
1094 url_components.hostname, url_components.port or 1080,
1095 True, # Remote DNS
cdd94c2e
YCH
1096 unquote_if_non_empty(url_components.username),
1097 unquote_if_non_empty(url_components.password),
71aff188
YCH
1098 )
1099
1100 class SocksConnection(base_class):
1101 def connect(self):
1102 self.sock = sockssocket()
1103 self.sock.setproxy(*proxy_args)
1104 if type(self.timeout) in (int, float):
1105 self.sock.settimeout(self.timeout)
1106 self.sock.connect((self.host, self.port))
1107
1108 if isinstance(self, compat_http_client.HTTPSConnection):
1109 if hasattr(self, '_context'): # Python > 2.6
1110 self.sock = self._context.wrap_socket(
1111 self.sock, server_hostname=self.host)
1112 else:
1113 self.sock = ssl.wrap_socket(self.sock)
1114
1115 return SocksConnection
1116
1117
be4a824d
PH
1118class YoutubeDLHTTPSHandler(compat_urllib_request.HTTPSHandler):
1119 def __init__(self, params, https_conn_class=None, *args, **kwargs):
1120 compat_urllib_request.HTTPSHandler.__init__(self, *args, **kwargs)
1121 self._https_conn_class = https_conn_class or compat_http_client.HTTPSConnection
1122 self._params = params
1123
1124 def https_open(self, req):
4f264c02 1125 kwargs = {}
71aff188
YCH
1126 conn_class = self._https_conn_class
1127
4f264c02
JMF
1128 if hasattr(self, '_context'): # python > 2.6
1129 kwargs['context'] = self._context
1130 if hasattr(self, '_check_hostname'): # python 3.x
1131 kwargs['check_hostname'] = self._check_hostname
71aff188
YCH
1132
1133 socks_proxy = req.headers.get('Ytdl-socks-proxy')
1134 if socks_proxy:
1135 conn_class = make_socks_conn_class(conn_class, socks_proxy)
1136 del req.headers['Ytdl-socks-proxy']
1137
be4a824d 1138 return self.do_open(functools.partial(
71aff188 1139 _create_http_connection, self, conn_class, True),
4f264c02 1140 req, **kwargs)
be4a824d
PH
1141
1142
1bab3437 1143class YoutubeDLCookieJar(compat_cookiejar.MozillaCookieJar):
e7e62441 1144 _HTTPONLY_PREFIX = '#HttpOnly_'
1145
1bab3437
S
1146 def save(self, filename=None, ignore_discard=False, ignore_expires=False):
1147 # Store session cookies with `expires` set to 0 instead of an empty
1148 # string
1149 for cookie in self:
1150 if cookie.expires is None:
1151 cookie.expires = 0
1152 compat_cookiejar.MozillaCookieJar.save(self, filename, ignore_discard, ignore_expires)
1153
1154 def load(self, filename=None, ignore_discard=False, ignore_expires=False):
e7e62441 1155 """Load cookies from a file."""
1156 if filename is None:
1157 if self.filename is not None:
1158 filename = self.filename
1159 else:
1160 raise ValueError(compat_cookiejar.MISSING_FILENAME_TEXT)
1161
1162 cf = io.StringIO()
1163 with open(filename) as f:
1164 for line in f:
1165 if line.startswith(self._HTTPONLY_PREFIX):
1166 line = line[len(self._HTTPONLY_PREFIX):]
1167 cf.write(compat_str(line))
1168 cf.seek(0)
1169 self._really_load(cf, filename, ignore_discard, ignore_expires)
1bab3437
S
1170 # Session cookies are denoted by either `expires` field set to
1171 # an empty string or 0. MozillaCookieJar only recognizes the former
1172 # (see [1]). So we need force the latter to be recognized as session
1173 # cookies on our own.
1174 # Session cookies may be important for cookies-based authentication,
1175 # e.g. usually, when user does not check 'Remember me' check box while
1176 # logging in on a site, some important cookies are stored as session
1177 # cookies so that not recognizing them will result in failed login.
1178 # 1. https://bugs.python.org/issue17164
1179 for cookie in self:
1180 # Treat `expires=0` cookies as session cookies
1181 if cookie.expires == 0:
1182 cookie.expires = None
1183 cookie.discard = True
1184
1185
a6420bf5
S
1186class YoutubeDLCookieProcessor(compat_urllib_request.HTTPCookieProcessor):
1187 def __init__(self, cookiejar=None):
1188 compat_urllib_request.HTTPCookieProcessor.__init__(self, cookiejar)
1189
1190 def http_response(self, request, response):
1191 # Python 2 will choke on next HTTP request in row if there are non-ASCII
1192 # characters in Set-Cookie HTTP header of last response (see
067aa17e 1193 # https://github.com/ytdl-org/youtube-dl/issues/6769).
a6420bf5
S
1194 # In order to at least prevent crashing we will percent encode Set-Cookie
1195 # header before HTTPCookieProcessor starts processing it.
e28034c5
S
1196 # if sys.version_info < (3, 0) and response.headers:
1197 # for set_cookie_header in ('Set-Cookie', 'Set-Cookie2'):
1198 # set_cookie = response.headers.get(set_cookie_header)
1199 # if set_cookie:
1200 # set_cookie_escaped = compat_urllib_parse.quote(set_cookie, b"%/;:@&=+$,!~*'()?#[] ")
1201 # if set_cookie != set_cookie_escaped:
1202 # del response.headers[set_cookie_header]
1203 # response.headers[set_cookie_header] = set_cookie_escaped
a6420bf5
S
1204 return compat_urllib_request.HTTPCookieProcessor.http_response(self, request, response)
1205
1206 https_request = compat_urllib_request.HTTPCookieProcessor.http_request
1207 https_response = http_response
1208
1209
46f59e89
S
1210def extract_timezone(date_str):
1211 m = re.search(
1212 r'^.{8,}?(?P<tz>Z$| ?(?P<sign>\+|-)(?P<hours>[0-9]{2}):?(?P<minutes>[0-9]{2})$)',
1213 date_str)
1214 if not m:
1215 timezone = datetime.timedelta()
1216 else:
1217 date_str = date_str[:-len(m.group('tz'))]
1218 if not m.group('sign'):
1219 timezone = datetime.timedelta()
1220 else:
1221 sign = 1 if m.group('sign') == '+' else -1
1222 timezone = datetime.timedelta(
1223 hours=sign * int(m.group('hours')),
1224 minutes=sign * int(m.group('minutes')))
1225 return timezone, date_str
1226
1227
08b38d54 1228def parse_iso8601(date_str, delimiter='T', timezone=None):
912b38b4
PH
1229 """ Return a UNIX timestamp from the given date """
1230
1231 if date_str is None:
1232 return None
1233
52c3a6e4
S
1234 date_str = re.sub(r'\.[0-9]+', '', date_str)
1235
08b38d54 1236 if timezone is None:
46f59e89
S
1237 timezone, date_str = extract_timezone(date_str)
1238
52c3a6e4
S
1239 try:
1240 date_format = '%Y-%m-%d{0}%H:%M:%S'.format(delimiter)
1241 dt = datetime.datetime.strptime(date_str, date_format) - timezone
1242 return calendar.timegm(dt.timetuple())
1243 except ValueError:
1244 pass
912b38b4
PH
1245
1246
46f59e89
S
1247def date_formats(day_first=True):
1248 return DATE_FORMATS_DAY_FIRST if day_first else DATE_FORMATS_MONTH_FIRST
1249
1250
42bdd9d0 1251def unified_strdate(date_str, day_first=True):
bf50b038 1252 """Return a string with the date in the format YYYYMMDD"""
64e7ad60
PH
1253
1254 if date_str is None:
1255 return None
bf50b038 1256 upload_date = None
5f6a1245 1257 # Replace commas
026fcc04 1258 date_str = date_str.replace(',', ' ')
42bdd9d0 1259 # Remove AM/PM + timezone
9bb8e0a3 1260 date_str = re.sub(r'(?i)\s*(?:AM|PM)(?:\s+[A-Z]+)?', '', date_str)
46f59e89 1261 _, date_str = extract_timezone(date_str)
42bdd9d0 1262
46f59e89 1263 for expression in date_formats(day_first):
bf50b038
JMF
1264 try:
1265 upload_date = datetime.datetime.strptime(date_str, expression).strftime('%Y%m%d')
5de90176 1266 except ValueError:
bf50b038 1267 pass
42393ce2
PH
1268 if upload_date is None:
1269 timetuple = email.utils.parsedate_tz(date_str)
1270 if timetuple:
c6b9cf05
S
1271 try:
1272 upload_date = datetime.datetime(*timetuple[:6]).strftime('%Y%m%d')
1273 except ValueError:
1274 pass
6a750402
JMF
1275 if upload_date is not None:
1276 return compat_str(upload_date)
bf50b038 1277
5f6a1245 1278
46f59e89
S
1279def unified_timestamp(date_str, day_first=True):
1280 if date_str is None:
1281 return None
1282
2ae2ffda 1283 date_str = re.sub(r'[,|]', '', date_str)
46f59e89 1284
7dc2a74e 1285 pm_delta = 12 if re.search(r'(?i)PM', date_str) else 0
46f59e89
S
1286 timezone, date_str = extract_timezone(date_str)
1287
1288 # Remove AM/PM + timezone
1289 date_str = re.sub(r'(?i)\s*(?:AM|PM)(?:\s+[A-Z]+)?', '', date_str)
1290
deef3195
S
1291 # Remove unrecognized timezones from ISO 8601 alike timestamps
1292 m = re.search(r'\d{1,2}:\d{1,2}(?:\.\d+)?(?P<tz>\s*[A-Z]+)$', date_str)
1293 if m:
1294 date_str = date_str[:-len(m.group('tz'))]
1295
f226880c
PH
1296 # Python only supports microseconds, so remove nanoseconds
1297 m = re.search(r'^([0-9]{4,}-[0-9]{1,2}-[0-9]{1,2}T[0-9]{1,2}:[0-9]{1,2}:[0-9]{1,2}\.[0-9]{6})[0-9]+$', date_str)
1298 if m:
1299 date_str = m.group(1)
1300
46f59e89
S
1301 for expression in date_formats(day_first):
1302 try:
7dc2a74e 1303 dt = datetime.datetime.strptime(date_str, expression) - timezone + datetime.timedelta(hours=pm_delta)
46f59e89
S
1304 return calendar.timegm(dt.timetuple())
1305 except ValueError:
1306 pass
1307 timetuple = email.utils.parsedate_tz(date_str)
1308 if timetuple:
7dc2a74e 1309 return calendar.timegm(timetuple) + pm_delta * 3600
46f59e89
S
1310
1311
28e614de 1312def determine_ext(url, default_ext='unknown_video'):
85750f89 1313 if url is None or '.' not in url:
f4776371 1314 return default_ext
9cb9a5df 1315 guess = url.partition('?')[0].rpartition('.')[2]
73e79f2a
PH
1316 if re.match(r'^[A-Za-z0-9]+$', guess):
1317 return guess
a7aaa398
S
1318 # Try extract ext from URLs like http://example.com/foo/bar.mp4/?download
1319 elif guess.rstrip('/') in KNOWN_EXTENSIONS:
9cb9a5df 1320 return guess.rstrip('/')
73e79f2a 1321 else:
cbdbb766 1322 return default_ext
73e79f2a 1323
5f6a1245 1324
d4051a8e 1325def subtitles_filename(filename, sub_lang, sub_format):
28e614de 1326 return filename.rsplit('.', 1)[0] + '.' + sub_lang + '.' + sub_format
d4051a8e 1327
5f6a1245 1328
bd558525 1329def date_from_str(date_str):
37254abc
JMF
1330 """
1331 Return a datetime object from a string in the format YYYYMMDD or
1332 (now|today)[+-][0-9](day|week|month|year)(s)?"""
1333 today = datetime.date.today()
f8795e10 1334 if date_str in ('now', 'today'):
37254abc 1335 return today
f8795e10
PH
1336 if date_str == 'yesterday':
1337 return today - datetime.timedelta(days=1)
ec85ded8 1338 match = re.match(r'(now|today)(?P<sign>[+-])(?P<time>\d+)(?P<unit>day|week|month|year)(s)?', date_str)
37254abc
JMF
1339 if match is not None:
1340 sign = match.group('sign')
1341 time = int(match.group('time'))
1342 if sign == '-':
1343 time = -time
1344 unit = match.group('unit')
dfb1b146 1345 # A bad approximation?
37254abc
JMF
1346 if unit == 'month':
1347 unit = 'day'
1348 time *= 30
1349 elif unit == 'year':
1350 unit = 'day'
1351 time *= 365
1352 unit += 's'
1353 delta = datetime.timedelta(**{unit: time})
1354 return today + delta
611c1dd9 1355 return datetime.datetime.strptime(date_str, '%Y%m%d').date()
5f6a1245
JW
1356
1357
e63fc1be 1358def hyphenate_date(date_str):
1359 """
1360 Convert a date in 'YYYYMMDD' format to 'YYYY-MM-DD' format"""
1361 match = re.match(r'^(\d\d\d\d)(\d\d)(\d\d)$', date_str)
1362 if match is not None:
1363 return '-'.join(match.groups())
1364 else:
1365 return date_str
1366
5f6a1245 1367
bd558525
JMF
1368class DateRange(object):
1369 """Represents a time interval between two dates"""
5f6a1245 1370
bd558525
JMF
1371 def __init__(self, start=None, end=None):
1372 """start and end must be strings in the format accepted by date"""
1373 if start is not None:
1374 self.start = date_from_str(start)
1375 else:
1376 self.start = datetime.datetime.min.date()
1377 if end is not None:
1378 self.end = date_from_str(end)
1379 else:
1380 self.end = datetime.datetime.max.date()
37254abc 1381 if self.start > self.end:
bd558525 1382 raise ValueError('Date range: "%s" , the start date must be before the end date' % self)
5f6a1245 1383
bd558525
JMF
1384 @classmethod
1385 def day(cls, day):
1386 """Returns a range that only contains the given day"""
5f6a1245
JW
1387 return cls(day, day)
1388
bd558525
JMF
1389 def __contains__(self, date):
1390 """Check if the date is in the range"""
37254abc
JMF
1391 if not isinstance(date, datetime.date):
1392 date = date_from_str(date)
1393 return self.start <= date <= self.end
5f6a1245 1394
bd558525 1395 def __str__(self):
5f6a1245 1396 return '%s - %s' % (self.start.isoformat(), self.end.isoformat())
c496ca96
PH
1397
1398
1399def platform_name():
1400 """ Returns the platform name as a compat_str """
1401 res = platform.platform()
1402 if isinstance(res, bytes):
1403 res = res.decode(preferredencoding())
1404
1405 assert isinstance(res, compat_str)
1406 return res
c257baff
PH
1407
1408
b58ddb32
PH
1409def _windows_write_string(s, out):
1410 """ Returns True if the string was written using special methods,
1411 False if it has yet to be written out."""
1412 # Adapted from http://stackoverflow.com/a/3259271/35070
1413
1414 import ctypes
1415 import ctypes.wintypes
1416
1417 WIN_OUTPUT_IDS = {
1418 1: -11,
1419 2: -12,
1420 }
1421
a383a98a
PH
1422 try:
1423 fileno = out.fileno()
1424 except AttributeError:
1425 # If the output stream doesn't have a fileno, it's virtual
1426 return False
aa42e873
PH
1427 except io.UnsupportedOperation:
1428 # Some strange Windows pseudo files?
1429 return False
b58ddb32
PH
1430 if fileno not in WIN_OUTPUT_IDS:
1431 return False
1432
d7cd9a9e 1433 GetStdHandle = compat_ctypes_WINFUNCTYPE(
b58ddb32 1434 ctypes.wintypes.HANDLE, ctypes.wintypes.DWORD)(
d7cd9a9e 1435 ('GetStdHandle', ctypes.windll.kernel32))
b58ddb32
PH
1436 h = GetStdHandle(WIN_OUTPUT_IDS[fileno])
1437
d7cd9a9e 1438 WriteConsoleW = compat_ctypes_WINFUNCTYPE(
b58ddb32
PH
1439 ctypes.wintypes.BOOL, ctypes.wintypes.HANDLE, ctypes.wintypes.LPWSTR,
1440 ctypes.wintypes.DWORD, ctypes.POINTER(ctypes.wintypes.DWORD),
d7cd9a9e 1441 ctypes.wintypes.LPVOID)(('WriteConsoleW', ctypes.windll.kernel32))
b58ddb32
PH
1442 written = ctypes.wintypes.DWORD(0)
1443
d7cd9a9e 1444 GetFileType = compat_ctypes_WINFUNCTYPE(ctypes.wintypes.DWORD, ctypes.wintypes.DWORD)(('GetFileType', ctypes.windll.kernel32))
b58ddb32
PH
1445 FILE_TYPE_CHAR = 0x0002
1446 FILE_TYPE_REMOTE = 0x8000
d7cd9a9e 1447 GetConsoleMode = compat_ctypes_WINFUNCTYPE(
b58ddb32
PH
1448 ctypes.wintypes.BOOL, ctypes.wintypes.HANDLE,
1449 ctypes.POINTER(ctypes.wintypes.DWORD))(
d7cd9a9e 1450 ('GetConsoleMode', ctypes.windll.kernel32))
b58ddb32
PH
1451 INVALID_HANDLE_VALUE = ctypes.wintypes.DWORD(-1).value
1452
1453 def not_a_console(handle):
1454 if handle == INVALID_HANDLE_VALUE or handle is None:
1455 return True
8fb3ac36
PH
1456 return ((GetFileType(handle) & ~FILE_TYPE_REMOTE) != FILE_TYPE_CHAR or
1457 GetConsoleMode(handle, ctypes.byref(ctypes.wintypes.DWORD())) == 0)
b58ddb32
PH
1458
1459 if not_a_console(h):
1460 return False
1461
d1b9c912
PH
1462 def next_nonbmp_pos(s):
1463 try:
1464 return next(i for i, c in enumerate(s) if ord(c) > 0xffff)
1465 except StopIteration:
1466 return len(s)
1467
1468 while s:
1469 count = min(next_nonbmp_pos(s), 1024)
1470
b58ddb32 1471 ret = WriteConsoleW(
d1b9c912 1472 h, s, count if count else 2, ctypes.byref(written), None)
b58ddb32
PH
1473 if ret == 0:
1474 raise OSError('Failed to write string')
d1b9c912
PH
1475 if not count: # We just wrote a non-BMP character
1476 assert written.value == 2
1477 s = s[1:]
1478 else:
1479 assert written.value > 0
1480 s = s[written.value:]
b58ddb32
PH
1481 return True
1482
1483
734f90bb 1484def write_string(s, out=None, encoding=None):
7459e3a2
PH
1485 if out is None:
1486 out = sys.stderr
8bf48f23 1487 assert type(s) == compat_str
7459e3a2 1488
b58ddb32
PH
1489 if sys.platform == 'win32' and encoding is None and hasattr(out, 'fileno'):
1490 if _windows_write_string(s, out):
1491 return
1492
7459e3a2
PH
1493 if ('b' in getattr(out, 'mode', '') or
1494 sys.version_info[0] < 3): # Python 2 lies about mode of sys.stderr
104aa738
PH
1495 byt = s.encode(encoding or preferredencoding(), 'ignore')
1496 out.write(byt)
1497 elif hasattr(out, 'buffer'):
1498 enc = encoding or getattr(out, 'encoding', None) or preferredencoding()
1499 byt = s.encode(enc, 'ignore')
1500 out.buffer.write(byt)
1501 else:
8bf48f23 1502 out.write(s)
7459e3a2
PH
1503 out.flush()
1504
1505
48ea9cea
PH
1506def bytes_to_intlist(bs):
1507 if not bs:
1508 return []
1509 if isinstance(bs[0], int): # Python 3
1510 return list(bs)
1511 else:
1512 return [ord(c) for c in bs]
1513
c257baff 1514
cba892fa 1515def intlist_to_bytes(xs):
1516 if not xs:
1517 return b''
edaa23f8 1518 return compat_struct_pack('%dB' % len(xs), *xs)
c38b1e77
PH
1519
1520
c1c9a79c
PH
1521# Cross-platform file locking
1522if sys.platform == 'win32':
1523 import ctypes.wintypes
1524 import msvcrt
1525
1526 class OVERLAPPED(ctypes.Structure):
1527 _fields_ = [
1528 ('Internal', ctypes.wintypes.LPVOID),
1529 ('InternalHigh', ctypes.wintypes.LPVOID),
1530 ('Offset', ctypes.wintypes.DWORD),
1531 ('OffsetHigh', ctypes.wintypes.DWORD),
1532 ('hEvent', ctypes.wintypes.HANDLE),
1533 ]
1534
1535 kernel32 = ctypes.windll.kernel32
1536 LockFileEx = kernel32.LockFileEx
1537 LockFileEx.argtypes = [
1538 ctypes.wintypes.HANDLE, # hFile
1539 ctypes.wintypes.DWORD, # dwFlags
1540 ctypes.wintypes.DWORD, # dwReserved
1541 ctypes.wintypes.DWORD, # nNumberOfBytesToLockLow
1542 ctypes.wintypes.DWORD, # nNumberOfBytesToLockHigh
1543 ctypes.POINTER(OVERLAPPED) # Overlapped
1544 ]
1545 LockFileEx.restype = ctypes.wintypes.BOOL
1546 UnlockFileEx = kernel32.UnlockFileEx
1547 UnlockFileEx.argtypes = [
1548 ctypes.wintypes.HANDLE, # hFile
1549 ctypes.wintypes.DWORD, # dwReserved
1550 ctypes.wintypes.DWORD, # nNumberOfBytesToLockLow
1551 ctypes.wintypes.DWORD, # nNumberOfBytesToLockHigh
1552 ctypes.POINTER(OVERLAPPED) # Overlapped
1553 ]
1554 UnlockFileEx.restype = ctypes.wintypes.BOOL
1555 whole_low = 0xffffffff
1556 whole_high = 0x7fffffff
1557
1558 def _lock_file(f, exclusive):
1559 overlapped = OVERLAPPED()
1560 overlapped.Offset = 0
1561 overlapped.OffsetHigh = 0
1562 overlapped.hEvent = 0
1563 f._lock_file_overlapped_p = ctypes.pointer(overlapped)
1564 handle = msvcrt.get_osfhandle(f.fileno())
1565 if not LockFileEx(handle, 0x2 if exclusive else 0x0, 0,
1566 whole_low, whole_high, f._lock_file_overlapped_p):
1567 raise OSError('Locking file failed: %r' % ctypes.FormatError())
1568
1569 def _unlock_file(f):
1570 assert f._lock_file_overlapped_p
1571 handle = msvcrt.get_osfhandle(f.fileno())
1572 if not UnlockFileEx(handle, 0,
1573 whole_low, whole_high, f._lock_file_overlapped_p):
1574 raise OSError('Unlocking file failed: %r' % ctypes.FormatError())
1575
1576else:
399a76e6
YCH
1577 # Some platforms, such as Jython, are missing fcntl
1578 try:
1579 import fcntl
c1c9a79c 1580
399a76e6
YCH
1581 def _lock_file(f, exclusive):
1582 fcntl.flock(f, fcntl.LOCK_EX if exclusive else fcntl.LOCK_SH)
c1c9a79c 1583
399a76e6
YCH
1584 def _unlock_file(f):
1585 fcntl.flock(f, fcntl.LOCK_UN)
1586 except ImportError:
1587 UNSUPPORTED_MSG = 'file locking is not supported on this platform'
1588
1589 def _lock_file(f, exclusive):
1590 raise IOError(UNSUPPORTED_MSG)
1591
1592 def _unlock_file(f):
1593 raise IOError(UNSUPPORTED_MSG)
c1c9a79c
PH
1594
1595
1596class locked_file(object):
1597 def __init__(self, filename, mode, encoding=None):
1598 assert mode in ['r', 'a', 'w']
1599 self.f = io.open(filename, mode, encoding=encoding)
1600 self.mode = mode
1601
1602 def __enter__(self):
1603 exclusive = self.mode != 'r'
1604 try:
1605 _lock_file(self.f, exclusive)
1606 except IOError:
1607 self.f.close()
1608 raise
1609 return self
1610
1611 def __exit__(self, etype, value, traceback):
1612 try:
1613 _unlock_file(self.f)
1614 finally:
1615 self.f.close()
1616
1617 def __iter__(self):
1618 return iter(self.f)
1619
1620 def write(self, *args):
1621 return self.f.write(*args)
1622
1623 def read(self, *args):
1624 return self.f.read(*args)
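# Usage sketch (illustrative): the lock is taken in __enter__ and released
# (and the file closed) in __exit__, so the class is meant to be used as a
# context manager:
#   with locked_file('archive.txt', 'a', encoding='utf-8') as f:
#       f.write('youtube some_video_id\n')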
4eb7f1d1
JMF
1625
1626
4644ac55
S
1627def get_filesystem_encoding():
1628 encoding = sys.getfilesystemencoding()
1629 return encoding if encoding is not None else 'utf-8'
1630
1631
4eb7f1d1 1632def shell_quote(args):
a6a173c2 1633 quoted_args = []
4644ac55 1634 encoding = get_filesystem_encoding()
a6a173c2
JMF
1635 for a in args:
1636 if isinstance(a, bytes):
1637 # We may get a filename encoded with 'encodeFilename'
1638 a = a.decode(encoding)
aefce8e6 1639 quoted_args.append(compat_shlex_quote(a))
28e614de 1640 return ' '.join(quoted_args)
9d4660ca
PH
1641
1642
1643def smuggle_url(url, data):
1644 """ Pass additional data in a URL for internal use. """
1645
81953d1a
RA
1646 url, idata = unsmuggle_url(url, {})
1647 data.update(idata)
15707c7e 1648 sdata = compat_urllib_parse_urlencode(
28e614de
PH
1649 {'__youtubedl_smuggle': json.dumps(data)})
1650 return url + '#' + sdata
9d4660ca
PH
1651
1652
79f82953 1653def unsmuggle_url(smug_url, default=None):
83e865a3 1654 if '#__youtubedl_smuggle' not in smug_url:
79f82953 1655 return smug_url, default
28e614de
PH
1656 url, _, sdata = smug_url.rpartition('#')
1657 jsond = compat_parse_qs(sdata)['__youtubedl_smuggle'][0]
9d4660ca
PH
1658 data = json.loads(jsond)
1659 return url, data
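# Illustrative round trip:
#   url = smuggle_url('http://example.com/video', {'referrer': 'http://example.com/'})
#   unsmuggle_url(url)
#   # -> ('http://example.com/video', {'referrer': 'http://example.com/'})
#   unsmuggle_url('http://example.com/video', {})
#   # -> ('http://example.com/video', {})  (no smuggled data, default returned)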
02dbf93f
PH
1660
1661
02dbf93f
PH
1662def format_bytes(bytes):
1663 if bytes is None:
28e614de 1664 return 'N/A'
02dbf93f
PH
1665 if type(bytes) is str:
1666 bytes = float(bytes)
1667 if bytes == 0.0:
1668 exponent = 0
1669 else:
1670 exponent = int(math.log(bytes, 1024.0))
28e614de 1671 suffix = ['B', 'KiB', 'MiB', 'GiB', 'TiB', 'PiB', 'EiB', 'ZiB', 'YiB'][exponent]
02dbf93f 1672 converted = float(bytes) / float(1024 ** exponent)
28e614de 1673 return '%.2f%s' % (converted, suffix)
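# Illustrative values:
#   format_bytes(1024)     # -> '1.00KiB'
#   format_bytes(1536)     # -> '1.50KiB'
#   format_bytes(1048576)  # -> '1.00MiB'
#   format_bytes(None)     # -> 'N/A'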
f53c966a 1674
1c088fa8 1675
fb47597b
S
1676def lookup_unit_table(unit_table, s):
1677 units_re = '|'.join(re.escape(u) for u in unit_table)
1678 m = re.match(
782b1b5b 1679 r'(?P<num>[0-9]+(?:[,.][0-9]*)?)\s*(?P<unit>%s)\b' % units_re, s)
fb47597b
S
1680 if not m:
1681 return None
1682 num_str = m.group('num').replace(',', '.')
1683 mult = unit_table[m.group('unit')]
1684 return int(float(num_str) * mult)
1685
1686
be64b5b0
PH
1687def parse_filesize(s):
1688 if s is None:
1689 return None
1690
dfb1b146 1691 # The lower-case forms are of course incorrect and unofficial,
be64b5b0
PH
1692 # but we support those too
1693 _UNIT_TABLE = {
1694 'B': 1,
1695 'b': 1,
70852b47 1696 'bytes': 1,
be64b5b0
PH
1697 'KiB': 1024,
1698 'KB': 1000,
1699 'kB': 1024,
1700 'Kb': 1000,
13585d76 1701 'kb': 1000,
70852b47
YCH
1702 'kilobytes': 1000,
1703 'kibibytes': 1024,
be64b5b0
PH
1704 'MiB': 1024 ** 2,
1705 'MB': 1000 ** 2,
1706 'mB': 1024 ** 2,
1707 'Mb': 1000 ** 2,
13585d76 1708 'mb': 1000 ** 2,
70852b47
YCH
1709 'megabytes': 1000 ** 2,
1710 'mebibytes': 1024 ** 2,
be64b5b0
PH
1711 'GiB': 1024 ** 3,
1712 'GB': 1000 ** 3,
1713 'gB': 1024 ** 3,
1714 'Gb': 1000 ** 3,
13585d76 1715 'gb': 1000 ** 3,
70852b47
YCH
1716 'gigabytes': 1000 ** 3,
1717 'gibibytes': 1024 ** 3,
be64b5b0
PH
1718 'TiB': 1024 ** 4,
1719 'TB': 1000 ** 4,
1720 'tB': 1024 ** 4,
1721 'Tb': 1000 ** 4,
13585d76 1722 'tb': 1000 ** 4,
70852b47
YCH
1723 'terabytes': 1000 ** 4,
1724 'tebibytes': 1024 ** 4,
be64b5b0
PH
1725 'PiB': 1024 ** 5,
1726 'PB': 1000 ** 5,
1727 'pB': 1024 ** 5,
1728 'Pb': 1000 ** 5,
13585d76 1729 'pb': 1000 ** 5,
70852b47
YCH
1730 'petabytes': 1000 ** 5,
1731 'pebibytes': 1024 ** 5,
be64b5b0
PH
1732 'EiB': 1024 ** 6,
1733 'EB': 1000 ** 6,
1734 'eB': 1024 ** 6,
1735 'Eb': 1000 ** 6,
13585d76 1736 'eb': 1000 ** 6,
70852b47
YCH
1737 'exabytes': 1000 ** 6,
1738 'exbibytes': 1024 ** 6,
be64b5b0
PH
1739 'ZiB': 1024 ** 7,
1740 'ZB': 1000 ** 7,
1741 'zB': 1024 ** 7,
1742 'Zb': 1000 ** 7,
13585d76 1743 'zb': 1000 ** 7,
70852b47
YCH
1744 'zettabytes': 1000 ** 7,
1745 'zebibytes': 1024 ** 7,
be64b5b0
PH
1746 'YiB': 1024 ** 8,
1747 'YB': 1000 ** 8,
1748 'yB': 1024 ** 8,
1749 'Yb': 1000 ** 8,
13585d76 1750 'yb': 1000 ** 8,
70852b47
YCH
1751 'yottabytes': 1000 ** 8,
1752 'yobibytes': 1024 ** 8,
be64b5b0
PH
1753 }
1754
fb47597b
S
1755 return lookup_unit_table(_UNIT_TABLE, s)
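# Illustrative values (decimal vs. binary prefixes):
#   parse_filesize('5 MB')    # -> 5000000
#   parse_filesize('5 MiB')   # -> 5242880
#   parse_filesize('1.5GiB')  # -> 1610612736
#   parse_filesize('foo')     # -> None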
1756
1757
1758def parse_count(s):
1759 if s is None:
be64b5b0
PH
1760 return None
1761
fb47597b
S
1762 s = s.strip()
1763
1764 if re.match(r'^[\d,.]+$', s):
1765 return str_to_int(s)
1766
1767 _UNIT_TABLE = {
1768 'k': 1000,
1769 'K': 1000,
1770 'm': 1000 ** 2,
1771 'M': 1000 ** 2,
1772 'kk': 1000 ** 2,
1773 'KK': 1000 ** 2,
1774 }
be64b5b0 1775
fb47597b 1776 return lookup_unit_table(_UNIT_TABLE, s)
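# Illustrative values:
#   parse_count('1,480')  # -> 1480
#   parse_count('2.5K')   # -> 2500
#   parse_count('1.2M')   # -> 1200000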
be64b5b0 1777
2f7ae819 1778
b871d7e9
S
1779def parse_resolution(s):
1780 if s is None:
1781 return {}
1782
1783 mobj = re.search(r'\b(?P<w>\d+)\s*[xX×]\s*(?P<h>\d+)\b', s)
1784 if mobj:
1785 return {
1786 'width': int(mobj.group('w')),
1787 'height': int(mobj.group('h')),
1788 }
1789
1790 mobj = re.search(r'\b(\d+)[pPiI]\b', s)
1791 if mobj:
1792 return {'height': int(mobj.group(1))}
1793
1794 mobj = re.search(r'\b([48])[kK]\b', s)
1795 if mobj:
1796 return {'height': int(mobj.group(1)) * 540}
1797
1798 return {}
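# Illustrative values:
#   parse_resolution('1920x1080')  # -> {'width': 1920, 'height': 1080}
#   parse_resolution('720p')       # -> {'height': 720}
#   parse_resolution('4K')         # -> {'height': 2160}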
1799
1800
a942d6cb 1801def month_by_name(name, lang='en'):
caefb1de
PH
1802 """ Return the number of a month by (locale-independently) English name """
1803
f6717dec 1804 month_names = MONTH_NAMES.get(lang, MONTH_NAMES['en'])
a942d6cb 1805
caefb1de 1806 try:
f6717dec 1807 return month_names.index(name) + 1
7105440c
YCH
1808 except ValueError:
1809 return None
1810
1811
1812def month_by_abbreviation(abbrev):
1813 """ Return the number of a month by (locale-independently) English
1814 abbreviations """
1815
1816 try:
1817 return [s[:3] for s in ENGLISH_MONTH_NAMES].index(abbrev) + 1
caefb1de
PH
1818 except ValueError:
1819 return None
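# Illustrative values:
#   month_by_name('June')            # -> 6
#   month_by_name('décembre', 'fr')  # -> 12
#   month_by_abbreviation('Dec')     # -> 12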
18258362
JMF
1820
1821
5aafe895 1822def fix_xml_ampersands(xml_str):
18258362 1823 """Replace all the '&' by '&amp;' in XML"""
5aafe895
PH
1824 return re.sub(
1825 r'&(?!amp;|lt;|gt;|apos;|quot;|#x[0-9a-fA-F]{,4};|#[0-9]{,4};)',
28e614de 1826 '&amp;',
5aafe895 1827 xml_str)
e3946f98
PH
1828
1829
1830def setproctitle(title):
8bf48f23 1831 assert isinstance(title, compat_str)
c1c05c67
YCH
1832
1833 # ctypes in Jython is not complete
1834 # http://bugs.jython.org/issue2148
1835 if sys.platform.startswith('java'):
1836 return
1837
e3946f98 1838 try:
611c1dd9 1839 libc = ctypes.cdll.LoadLibrary('libc.so.6')
e3946f98
PH
1840 except OSError:
1841 return
2f49bcd6
RC
1842 except TypeError:
1843 # LoadLibrary in Windows Python 2.7.13 only expects
1844 # a bytestring, but since unicode_literals turns
1845 # every string into a unicode string, it fails.
1846 return
6eefe533
PH
1847 title_bytes = title.encode('utf-8')
1848 buf = ctypes.create_string_buffer(len(title_bytes))
1849 buf.value = title_bytes
e3946f98 1850 try:
6eefe533 1851 libc.prctl(15, buf, 0, 0, 0)
e3946f98
PH
1852 except AttributeError:
1853 return # Strange libc, just skip this
d7dda168
PH
1854
1855
1856def remove_start(s, start):
46bc9b7d 1857 return s[len(start):] if s is not None and s.startswith(start) else s
29eb5174
PH
1858
1859
2b9faf55 1860def remove_end(s, end):
46bc9b7d 1861 return s[:-len(end)] if s is not None and s.endswith(end) else s
2b9faf55
PH
1862
1863
31b2051e
S
1864def remove_quotes(s):
1865 if s is None or len(s) < 2:
1866 return s
1867 for quote in ('"', "'", ):
1868 if s[0] == quote and s[-1] == quote:
1869 return s[1:-1]
1870 return s
1871
1872
29eb5174 1873def url_basename(url):
9b8aaeed 1874 path = compat_urlparse.urlparse(url).path
28e614de 1875 return path.strip('/').split('/')[-1]
aa94a6d3
PH
1876
1877
02dc0a36
S
1878def base_url(url):
1879 return re.match(r'https?://[^?#&]+/', url).group()
1880
1881
e34c3361 1882def urljoin(base, path):
4b5de77b
S
1883 if isinstance(path, bytes):
1884 path = path.decode('utf-8')
e34c3361
S
1885 if not isinstance(path, compat_str) or not path:
1886 return None
fad4ceb5 1887 if re.match(r'^(?:[a-zA-Z][a-zA-Z0-9+-.]*:)?//', path):
e34c3361 1888 return path
4b5de77b
S
1889 if isinstance(base, bytes):
1890 base = base.decode('utf-8')
1891 if not isinstance(base, compat_str) or not re.match(
1892 r'^(?:https?:)?//', base):
e34c3361
S
1893 return None
1894 return compat_urlparse.urljoin(base, path)
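# Illustrative values:
#   url_basename('https://example.com/path/video.mp4?foo=bar')   # -> 'video.mp4'
#   base_url('https://example.com/path/manifest.f4m')            # -> 'https://example.com/path/'
#   urljoin('https://example.com/a/', 'b/c.mp4')                  # -> 'https://example.com/a/b/c.mp4'
#   urljoin('https://example.com/a/', '//cdn.example.com/c.mp4')  # -> '//cdn.example.com/c.mp4'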
1895
1896
aa94a6d3
PH
1897class HEADRequest(compat_urllib_request.Request):
1898 def get_method(self):
611c1dd9 1899 return 'HEAD'
7217e148
PH
1900
1901
95cf60e8
S
1902class PUTRequest(compat_urllib_request.Request):
1903 def get_method(self):
1904 return 'PUT'
1905
1906
9732d77e 1907def int_or_none(v, scale=1, default=None, get_attr=None, invscale=1):
28746fbd
PH
1908 if get_attr:
1909 if v is not None:
1910 v = getattr(v, get_attr, None)
9572013d
PH
1911 if v == '':
1912 v = None
1812afb7
S
1913 if v is None:
1914 return default
1915 try:
1916 return int(v) * invscale // scale
1917 except ValueError:
af98f8ff 1918 return default
9732d77e 1919
9572013d 1920
40a90862
JMF
1921def str_or_none(v, default=None):
1922 return default if v is None else compat_str(v)
1923
9732d77e
PH
1924
1925def str_to_int(int_str):
48d4681e 1926 """ A more relaxed version of int_or_none """
9732d77e
PH
1927 if int_str is None:
1928 return None
28e614de 1929 int_str = re.sub(r'[,\.\+]', '', int_str)
9732d77e 1930 return int(int_str)
608d11f5
PH
1931
1932
9732d77e 1933def float_or_none(v, scale=1, invscale=1, default=None):
caf80631
S
1934 if v is None:
1935 return default
1936 try:
1937 return float(v) * invscale / scale
1938 except ValueError:
1939 return default
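# Illustrative values:
#   int_or_none('42')                    # -> 42
#   int_or_none('')                      # -> None
#   int_or_none('1500', scale=1000)      # -> 1
#   str_to_int('1,000,000')              # -> 1000000
#   float_or_none('2.5', invscale=1000)  # -> 2500.0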
43f775e4
PH
1940
1941
c7e327c4
S
1942def bool_or_none(v, default=None):
1943 return v if isinstance(v, bool) else default
1944
1945
b72b4431
S
1946def strip_or_none(v):
1947 return None if v is None else v.strip()
1948
1949
af03000a
S
1950def url_or_none(url):
1951 if not url or not isinstance(url, compat_str):
1952 return None
1953 url = url.strip()
1954 return url if re.match(r'^(?:[a-zA-Z][\da-zA-Z.+-]*:)?//', url) else None
1955
1956
608d11f5 1957def parse_duration(s):
8f9312c3 1958 if not isinstance(s, compat_basestring):
608d11f5
PH
1959 return None
1960
ca7b3246
S
1961 s = s.strip()
1962
acaff495 1963 days, hours, mins, secs, ms = [None] * 5
15846398 1964 m = re.match(r'(?:(?:(?:(?P<days>[0-9]+):)?(?P<hours>[0-9]+):)?(?P<mins>[0-9]+):)?(?P<secs>[0-9]+)(?P<ms>\.[0-9]+)?Z?$', s)
acaff495 1965 if m:
1966 days, hours, mins, secs, ms = m.groups()
1967 else:
1968 m = re.match(
056653bb
S
1969 r'''(?ix)(?:P?
1970 (?:
1971 [0-9]+\s*y(?:ears?)?\s*
1972 )?
1973 (?:
1974 [0-9]+\s*m(?:onths?)?\s*
1975 )?
1976 (?:
1977 [0-9]+\s*w(?:eeks?)?\s*
1978 )?
8f4b58d7 1979 (?:
acaff495 1980 (?P<days>[0-9]+)\s*d(?:ays?)?\s*
8f4b58d7 1981 )?
056653bb 1982 T)?
acaff495 1983 (?:
1984 (?P<hours>[0-9]+)\s*h(?:ours?)?\s*
1985 )?
1986 (?:
1987 (?P<mins>[0-9]+)\s*m(?:in(?:ute)?s?)?\s*
1988 )?
1989 (?:
1990 (?P<secs>[0-9]+)(?P<ms>\.[0-9]+)?\s*s(?:ec(?:ond)?s?)?\s*
15846398 1991 )?Z?$''', s)
acaff495 1992 if m:
1993 days, hours, mins, secs, ms = m.groups()
1994 else:
15846398 1995 m = re.match(r'(?i)(?:(?P<hours>[0-9.]+)\s*(?:hours?)|(?P<mins>[0-9.]+)\s*(?:mins?\.?|minutes?)\s*)Z?$', s)
acaff495 1996 if m:
1997 hours, mins = m.groups()
1998 else:
1999 return None
2000
2001 duration = 0
2002 if secs:
2003 duration += float(secs)
2004 if mins:
2005 duration += float(mins) * 60
2006 if hours:
2007 duration += float(hours) * 60 * 60
2008 if days:
2009 duration += float(days) * 24 * 60 * 60
2010 if ms:
2011 duration += float(ms)
2012 return duration
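# Illustrative values:
#   parse_duration('1:30')         # -> 90.0
#   parse_duration('01:02:03.05')  # -> 3723.05
#   parse_duration('PT1H30M')      # -> 5400.0
#   parse_duration('3 min')        # -> 180.0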
91d7d0b3
JMF
2013
2014
e65e4c88 2015def prepend_extension(filename, ext, expected_real_ext=None):
5f6a1245 2016 name, real_ext = os.path.splitext(filename)
e65e4c88
S
2017 return (
2018 '{0}.{1}{2}'.format(name, ext, real_ext)
2019 if not expected_real_ext or real_ext[1:] == expected_real_ext
2020 else '{0}.{1}'.format(filename, ext))
d70ad093
PH
2021
2022
b3ed15b7
S
2023def replace_extension(filename, ext, expected_real_ext=None):
2024 name, real_ext = os.path.splitext(filename)
2025 return '{0}.{1}'.format(
2026 name if not expected_real_ext or real_ext[1:] == expected_real_ext else filename,
2027 ext)
2028
2029
d70ad093
PH
2030def check_executable(exe, args=[]):
2031 """ Checks if the given binary is installed somewhere in PATH, and returns its name.
2032 args can be a list of arguments for a short output (like -version) """
2033 try:
2034 subprocess.Popen([exe] + args, stdout=subprocess.PIPE, stderr=subprocess.PIPE).communicate()
2035 except OSError:
2036 return False
2037 return exe
b7ab0590
PH
2038
2039
95807118 2040def get_exe_version(exe, args=['--version'],
cae97f65 2041 version_re=None, unrecognized='present'):
95807118
PH
2042 """ Returns the version of the specified executable,
2043 or False if the executable is not present """
2044 try:
b64d04c1
YCH
2045 # STDIN should be redirected too. On UNIX-like systems, ffmpeg triggers
2046 # SIGTTOU if youtube-dl is run in the background.
067aa17e 2047 # See https://github.com/ytdl-org/youtube-dl/issues/955#issuecomment-209789656
cae97f65 2048 out, _ = subprocess.Popen(
54116803 2049 [encodeArgument(exe)] + args,
00ca7552 2050 stdin=subprocess.PIPE,
95807118
PH
2051 stdout=subprocess.PIPE, stderr=subprocess.STDOUT).communicate()
2052 except OSError:
2053 return False
cae97f65
PH
2054 if isinstance(out, bytes): # Python 2.x
2055 out = out.decode('ascii', 'ignore')
2056 return detect_exe_version(out, version_re, unrecognized)
2057
2058
2059def detect_exe_version(output, version_re=None, unrecognized='present'):
2060 assert isinstance(output, compat_str)
2061 if version_re is None:
2062 version_re = r'version\s+([-0-9._a-zA-Z]+)'
2063 m = re.search(version_re, output)
95807118
PH
2064 if m:
2065 return m.group(1)
2066 else:
2067 return unrecognized
2068
2069
b7ab0590 2070class PagedList(object):
dd26ced1
PH
2071 def __len__(self):
2072 # This is only useful for tests
2073 return len(self.getslice())
2074
9c44d242
PH
2075
2076class OnDemandPagedList(PagedList):
6be08ce6 2077 def __init__(self, pagefunc, pagesize, use_cache=True):
9c44d242
PH
2078 self._pagefunc = pagefunc
2079 self._pagesize = pagesize
b95dc034
YCH
2080 self._use_cache = use_cache
2081 if use_cache:
2082 self._cache = {}
9c44d242 2083
b7ab0590
PH
2084 def getslice(self, start=0, end=None):
2085 res = []
2086 for pagenum in itertools.count(start // self._pagesize):
2087 firstid = pagenum * self._pagesize
2088 nextfirstid = pagenum * self._pagesize + self._pagesize
2089 if start >= nextfirstid:
2090 continue
2091
b95dc034
YCH
2092 page_results = None
2093 if self._use_cache:
2094 page_results = self._cache.get(pagenum)
2095 if page_results is None:
2096 page_results = list(self._pagefunc(pagenum))
2097 if self._use_cache:
2098 self._cache[pagenum] = page_results
b7ab0590
PH
2099
2100 startv = (
2101 start % self._pagesize
2102 if firstid <= start < nextfirstid
2103 else 0)
2104
2105 endv = (
2106 ((end - 1) % self._pagesize) + 1
2107 if (end is not None and firstid <= end <= nextfirstid)
2108 else None)
2109
2110 if startv != 0 or endv is not None:
2111 page_results = page_results[startv:endv]
2112 res.extend(page_results)
2113
 2114 # A little optimization - if the current page is not "full", i.e. does
 2115 # not contain page_size videos, then we can assume that this page
 2116 # is the last one - there are no more ids on further pages -
 2117 # i.e. no need to query again.
2118 if len(page_results) + startv < self._pagesize:
2119 break
2120
2121 # If we got the whole page, but the next page is not interesting,
2122 # break out early as well
2123 if end == nextfirstid:
2124 break
2125 return res
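# Usage sketch (illustrative; fetch_page and fetch_page_of_ids are
# hypothetical callbacks): only the pages overlapping the requested slice are
# fetched, and they are cached when use_cache is True:
#   def fetch_page(pagenum):
#       return fetch_page_of_ids(pagenum)  # hypothetical network call
#   entries = OnDemandPagedList(fetch_page, pagesize=50)
#   first_ten = entries.getslice(0, 10)  # only page 0 is requested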
81c2f20b
PH
2126
2127
9c44d242
PH
2128class InAdvancePagedList(PagedList):
2129 def __init__(self, pagefunc, pagecount, pagesize):
2130 self._pagefunc = pagefunc
2131 self._pagecount = pagecount
2132 self._pagesize = pagesize
2133
2134 def getslice(self, start=0, end=None):
2135 res = []
2136 start_page = start // self._pagesize
2137 end_page = (
2138 self._pagecount if end is None else (end // self._pagesize + 1))
2139 skip_elems = start - start_page * self._pagesize
2140 only_more = None if end is None else end - start
2141 for pagenum in range(start_page, end_page):
2142 page = list(self._pagefunc(pagenum))
2143 if skip_elems:
2144 page = page[skip_elems:]
2145 skip_elems = None
2146 if only_more is not None:
2147 if len(page) < only_more:
2148 only_more -= len(page)
2149 else:
2150 page = page[:only_more]
2151 res.extend(page)
2152 break
2153 res.extend(page)
2154 return res
2155
2156
81c2f20b 2157def uppercase_escape(s):
676eb3f2 2158 unicode_escape = codecs.getdecoder('unicode_escape')
81c2f20b 2159 return re.sub(
a612753d 2160 r'\\U[0-9a-fA-F]{8}',
676eb3f2
PH
2161 lambda m: unicode_escape(m.group(0))[0],
2162 s)
0fe2ff78
YCH
2163
2164
2165def lowercase_escape(s):
2166 unicode_escape = codecs.getdecoder('unicode_escape')
2167 return re.sub(
2168 r'\\u[0-9a-fA-F]{4}',
2169 lambda m: unicode_escape(m.group(0))[0],
2170 s)
b53466e1 2171
d05cfe06
S
2172
2173def escape_rfc3986(s):
2174 """Escape non-ASCII characters as suggested by RFC 3986"""
8f9312c3 2175 if sys.version_info < (3, 0) and isinstance(s, compat_str):
d05cfe06 2176 s = s.encode('utf-8')
ecc0c5ee 2177 return compat_urllib_parse.quote(s, b"%/;:@&=+$,!~*'()?#[]")
d05cfe06
S
2178
2179
2180def escape_url(url):
2181 """Escape URL as suggested by RFC 3986"""
2182 url_parsed = compat_urllib_parse_urlparse(url)
2183 return url_parsed._replace(
efbed08d 2184 netloc=url_parsed.netloc.encode('idna').decode('ascii'),
d05cfe06
S
2185 path=escape_rfc3986(url_parsed.path),
2186 params=escape_rfc3986(url_parsed.params),
2187 query=escape_rfc3986(url_parsed.query),
2188 fragment=escape_rfc3986(url_parsed.fragment)
2189 ).geturl()
2190
62e609ab
PH
2191
2192def read_batch_urls(batch_fd):
2193 def fixup(url):
2194 if not isinstance(url, compat_str):
2195 url = url.decode('utf-8', 'replace')
28e614de 2196 BOM_UTF8 = '\xef\xbb\xbf'
62e609ab
PH
2197 if url.startswith(BOM_UTF8):
2198 url = url[len(BOM_UTF8):]
2199 url = url.strip()
2200 if url.startswith(('#', ';', ']')):
2201 return False
2202 return url
2203
2204 with contextlib.closing(batch_fd) as fd:
2205 return [url for url in map(fixup, fd) if url]
b74fa8cd
JMF
2206
2207
2208def urlencode_postdata(*args, **kargs):
15707c7e 2209 return compat_urllib_parse_urlencode(*args, **kargs).encode('ascii')
bcf89ce6
PH
2210
2211
38f9ef31 2212def update_url_query(url, query):
cacd9966
YCH
2213 if not query:
2214 return url
38f9ef31 2215 parsed_url = compat_urlparse.urlparse(url)
2216 qs = compat_parse_qs(parsed_url.query)
2217 qs.update(query)
2218 return compat_urlparse.urlunparse(parsed_url._replace(
15707c7e 2219 query=compat_urllib_parse_urlencode(qs, True)))
16392824 2220
8e60dc75 2221
ed0291d1
S
2222def update_Request(req, url=None, data=None, headers={}, query={}):
2223 req_headers = req.headers.copy()
2224 req_headers.update(headers)
2225 req_data = data or req.data
2226 req_url = update_url_query(url or req.get_full_url(), query)
95cf60e8
S
2227 req_get_method = req.get_method()
2228 if req_get_method == 'HEAD':
2229 req_type = HEADRequest
2230 elif req_get_method == 'PUT':
2231 req_type = PUTRequest
2232 else:
2233 req_type = compat_urllib_request.Request
ed0291d1
S
2234 new_req = req_type(
2235 req_url, data=req_data, headers=req_headers,
2236 origin_req_host=req.origin_req_host, unverifiable=req.unverifiable)
2237 if hasattr(req, 'timeout'):
2238 new_req.timeout = req.timeout
2239 return new_req
2240
2241
10c87c15 2242def _multipart_encode_impl(data, boundary):
0c265486
YCH
2243 content_type = 'multipart/form-data; boundary=%s' % boundary
2244
2245 out = b''
2246 for k, v in data.items():
2247 out += b'--' + boundary.encode('ascii') + b'\r\n'
2248 if isinstance(k, compat_str):
2249 k = k.encode('utf-8')
2250 if isinstance(v, compat_str):
2251 v = v.encode('utf-8')
2252 # RFC 2047 requires non-ASCII field names to be encoded, while RFC 7578
2253 # suggests sending UTF-8 directly. Firefox sends UTF-8, too
b2ad479d 2254 content = b'Content-Disposition: form-data; name="' + k + b'"\r\n\r\n' + v + b'\r\n'
0c265486
YCH
2255 if boundary.encode('ascii') in content:
2256 raise ValueError('Boundary overlaps with data')
2257 out += content
2258
2259 out += b'--' + boundary.encode('ascii') + b'--\r\n'
2260
2261 return out, content_type
2262
2263
2264def multipart_encode(data, boundary=None):
2265 '''
2266 Encode a dict to RFC 7578-compliant form-data
2267
2268 data:
2269 A dict where keys and values can be either Unicode or bytes-like
2270 objects.
2271 boundary:
 2272 If specified, it must be a Unicode object and is used as the boundary. Otherwise
2273 a random boundary is generated.
2274
2275 Reference: https://tools.ietf.org/html/rfc7578
2276 '''
2277 has_specified_boundary = boundary is not None
2278
2279 while True:
2280 if boundary is None:
2281 boundary = '---------------' + str(random.randrange(0x0fffffff, 0xffffffff))
2282
2283 try:
10c87c15 2284 out, content_type = _multipart_encode_impl(data, boundary)
0c265486
YCH
2285 break
2286 except ValueError:
2287 if has_specified_boundary:
2288 raise
2289 boundary = None
2290
2291 return out, content_type
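# Usage sketch (illustrative):
#   body, content_type = multipart_encode({'username': 'foo', 'file': b'data'})
#   # body is a bytes object ready to be sent as POST data together with a
#   # Content-Type header set to content_type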
2292
2293
86296ad2 2294def dict_get(d, key_or_keys, default=None, skip_false_values=True):
cbecc9b9
S
2295 if isinstance(key_or_keys, (list, tuple)):
2296 for key in key_or_keys:
86296ad2
S
2297 if key not in d or d[key] is None or skip_false_values and not d[key]:
2298 continue
2299 return d[key]
cbecc9b9
S
2300 return default
2301 return d.get(key_or_keys, default)
2302
2303
329ca3be 2304def try_get(src, getter, expected_type=None):
a32a9a7e
S
2305 if not isinstance(getter, (list, tuple)):
2306 getter = [getter]
2307 for get in getter:
2308 try:
2309 v = get(src)
2310 except (AttributeError, KeyError, TypeError, IndexError):
2311 pass
2312 else:
2313 if expected_type is None or isinstance(v, expected_type):
2314 return v
329ca3be
S
2315
2316
6cc62232
S
2317def merge_dicts(*dicts):
2318 merged = {}
2319 for a_dict in dicts:
2320 for k, v in a_dict.items():
2321 if v is None:
2322 continue
2323 if (k not in merged or
2324 (isinstance(v, compat_str) and v and
2325 isinstance(merged[k], compat_str) and
2326 not merged[k])):
2327 merged[k] = v
2328 return merged
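# Illustrative values:
#   dict_get({'a': None, 'b': 'x'}, ('a', 'b'))                 # -> 'x'
#   try_get({'a': [{'b': 42}]}, lambda x: x['a'][0]['b'], int)  # -> 42
#   merge_dicts({'title': ''}, {'title': 'Video', 'id': '42'})  # -> {'title': 'Video', 'id': '42'}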
2329
2330
8e60dc75
S
2331def encode_compat_str(string, encoding=preferredencoding(), errors='strict'):
2332 return string if isinstance(string, compat_str) else compat_str(string, encoding, errors)
2333
16392824 2334
a1a530b0
PH
2335US_RATINGS = {
2336 'G': 0,
2337 'PG': 10,
2338 'PG-13': 13,
2339 'R': 16,
2340 'NC': 18,
2341}
fac55558
PH
2342
2343
a8795327 2344TV_PARENTAL_GUIDELINES = {
5a16c9d9
RA
2345 'TV-Y': 0,
2346 'TV-Y7': 7,
2347 'TV-G': 0,
2348 'TV-PG': 0,
2349 'TV-14': 14,
2350 'TV-MA': 17,
a8795327
S
2351}
2352
2353
146c80e2 2354def parse_age_limit(s):
a8795327
S
2355 if type(s) == int:
2356 return s if 0 <= s <= 21 else None
2357 if not isinstance(s, compat_basestring):
d838b1bd 2358 return None
146c80e2 2359 m = re.match(r'^(?P<age>\d{1,2})\+?$', s)
a8795327
S
2360 if m:
2361 return int(m.group('age'))
2362 if s in US_RATINGS:
2363 return US_RATINGS[s]
5a16c9d9 2364 m = re.match(r'^TV[_-]?(%s)$' % '|'.join(k[3:] for k in TV_PARENTAL_GUIDELINES), s)
b8361187 2365 if m:
5a16c9d9 2366 return TV_PARENTAL_GUIDELINES['TV-' + m.group(1)]
b8361187 2367 return None
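# Illustrative values:
#   parse_age_limit(18)       # -> 18
#   parse_age_limit('18+')    # -> 18
#   parse_age_limit('PG-13')  # -> 13
#   parse_age_limit('TV-MA')  # -> 17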
146c80e2
S
2368
2369
fac55558 2370def strip_jsonp(code):
609a61e3 2371 return re.sub(
5552c9eb 2372 r'''(?sx)^
e9c671d5 2373 (?:window\.)?(?P<func_name>[a-zA-Z0-9_.$]*)
5552c9eb
YCH
2374 (?:\s*&&\s*(?P=func_name))?
2375 \s*\(\s*(?P<callback_data>.*)\);?
2376 \s*?(?://[^\n]*)*$''',
2377 r'\g<callback_data>', code)
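# Illustrative value:
#   strip_jsonp('callback({"status": "ok"});')  # -> '{"status": "ok"}'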
478c2c61
PH
2378
2379
e05f6939 2380def js_to_json(code):
4195096e
S
2381 COMMENT_RE = r'/\*(?:(?!\*/).)*?\*/|//[^\n]*'
2382 SKIP_RE = r'\s*(?:{comment})?\s*'.format(comment=COMMENT_RE)
2383 INTEGER_TABLE = (
2384 (r'(?s)^(0[xX][0-9a-fA-F]+){skip}:?$'.format(skip=SKIP_RE), 16),
2385 (r'(?s)^(0+[0-7]+){skip}:?$'.format(skip=SKIP_RE), 8),
2386 )
2387
e05f6939 2388 def fix_kv(m):
e7b6d122
PH
2389 v = m.group(0)
2390 if v in ('true', 'false', 'null'):
2391 return v
b3ee552e 2392 elif v.startswith('/*') or v.startswith('//') or v == ',':
bd1e4844 2393 return ""
2394
2395 if v[0] in ("'", '"'):
2396 v = re.sub(r'(?s)\\.|"', lambda m: {
e7b6d122 2397 '"': '\\"',
bd1e4844 2398 "\\'": "'",
2399 '\\\n': '',
2400 '\\x': '\\u00',
2401 }.get(m.group(0), m.group(0)), v[1:-1])
2402
89ac4a19
S
2403 for regex, base in INTEGER_TABLE:
2404 im = re.match(regex, v)
2405 if im:
e4659b45 2406 i = int(im.group(1), base)
89ac4a19
S
2407 return '"%d":' % i if v.endswith(':') else '%d' % i
2408
e7b6d122 2409 return '"%s"' % v
e05f6939 2410
bd1e4844 2411 return re.sub(r'''(?sx)
2412 "(?:[^"\\]*(?:\\\\|\\['"nurtbfx/\n]))*[^"\\]*"|
2413 '(?:[^'\\]*(?:\\\\|\\['"nurtbfx/\n]))*[^'\\]*'|
4195096e 2414 {comment}|,(?={skip}[\]}}])|
c384d537 2415 (?:(?<![0-9])[eE]|[a-df-zA-DF-Z_])[.a-zA-Z_0-9]*|
4195096e
S
2416 \b(?:0[xX][0-9a-fA-F]+|0+[0-7]+)(?:{skip}:)?|
2417 [0-9]+(?={skip}:)
2418 '''.format(comment=COMMENT_RE, skip=SKIP_RE), fix_kv, code)
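# Illustrative value (unquoted keys, single quotes and hex literals are
# normalized; comments and trailing commas are dropped):
#   js_to_json("{foo: 'bar', num: 0x10}")  # -> '{"foo": "bar", "num": 16}'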
e05f6939
PH
2419
2420
478c2c61
PH
2421def qualities(quality_ids):
2422 """ Get a numeric quality value out of a list of possible values """
2423 def q(qid):
2424 try:
2425 return quality_ids.index(qid)
2426 except ValueError:
2427 return -1
2428 return q
2429
acd69589
PH
2430
2431DEFAULT_OUTTMPL = '%(title)s-%(id)s.%(ext)s'
0a871f68 2432
a020a0dc
PH
2433
2434def limit_length(s, length):
2435 """ Add ellipses to overly long strings """
2436 if s is None:
2437 return None
2438 ELLIPSES = '...'
2439 if len(s) > length:
2440 return s[:length - len(ELLIPSES)] + ELLIPSES
2441 return s
48844745
PH
2442
2443
2444def version_tuple(v):
5f9b8394 2445 return tuple(int(e) for e in re.split(r'[-.]', v))
48844745
PH
2446
2447
2448def is_outdated_version(version, limit, assume_new=True):
2449 if not version:
2450 return not assume_new
2451 try:
2452 return version_tuple(version) < version_tuple(limit)
2453 except ValueError:
2454 return not assume_new
732ea2f0
PH
2455
2456
2457def ytdl_is_updateable():
2458 """ Returns if youtube-dl can be updated with -U """
2459 from zipimport import zipimporter
2460
2461 return isinstance(globals().get('__loader__'), zipimporter) or hasattr(sys, 'frozen')
7d4111ed
PH
2462
2463
2464def args_to_str(args):
2465 # Get a short string representation for a subprocess command
702ccf2d 2466 return ' '.join(compat_shlex_quote(a) for a in args)
2ccd1b10
PH
2467
2468
9b9c5355 2469def error_to_compat_str(err):
fdae2358
S
2470 err_str = str(err)
2471 # On python 2 error byte string must be decoded with proper
2472 # encoding rather than ascii
2473 if sys.version_info[0] < 3:
2474 err_str = err_str.decode(preferredencoding())
2475 return err_str
2476
2477
c460bdd5 2478def mimetype2ext(mt):
eb9ee194
S
2479 if mt is None:
2480 return None
2481
765ac263
JMF
2482 ext = {
2483 'audio/mp4': 'm4a',
6c33d24b
YCH
 2484 # Per RFC 3003, audio/mpeg can be .mp1, .mp2 or .mp3. Here we use .mp3 as
2485 # it's the most popular one
2486 'audio/mpeg': 'mp3',
765ac263
JMF
2487 }.get(mt)
2488 if ext is not None:
2489 return ext
2490
c460bdd5 2491 _, _, res = mt.rpartition('/')
6562d34a 2492 res = res.split(';')[0].strip().lower()
c460bdd5
PH
2493
2494 return {
f6861ec9 2495 '3gpp': '3gp',
cafcf657 2496 'smptett+xml': 'tt',
cafcf657 2497 'ttaf+xml': 'dfxp',
a0d8d704 2498 'ttml+xml': 'ttml',
f6861ec9 2499 'x-flv': 'flv',
a0d8d704 2500 'x-mp4-fragmented': 'mp4',
d4f05d47 2501 'x-ms-sami': 'sami',
a0d8d704 2502 'x-ms-wmv': 'wmv',
b4173f15
RA
2503 'mpegurl': 'm3u8',
2504 'x-mpegurl': 'm3u8',
2505 'vnd.apple.mpegurl': 'm3u8',
2506 'dash+xml': 'mpd',
b4173f15 2507 'f4m+xml': 'f4m',
f164b971 2508 'hds+xml': 'f4m',
e910fe2f 2509 'vnd.ms-sstr+xml': 'ism',
c2b2c7e1 2510 'quicktime': 'mov',
98ce1a3f 2511 'mp2t': 'ts',
c460bdd5
PH
2512 }.get(res, res)
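# Illustrative values:
#   mimetype2ext('audio/mp4')                 # -> 'm4a'
#   mimetype2ext('application/x-mpegurl')     # -> 'm3u8'
#   mimetype2ext('video/mp4; codecs="avc1"')  # -> 'mp4'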
2513
2514
4f3c5e06 2515def parse_codecs(codecs_str):
2516 # http://tools.ietf.org/html/rfc6381
2517 if not codecs_str:
2518 return {}
2519 splited_codecs = list(filter(None, map(
2520 lambda str: str.strip(), codecs_str.strip().strip(',').split(','))))
2521 vcodec, acodec = None, None
2522 for full_codec in splited_codecs:
2523 codec = full_codec.split('.')[0]
25d110be 2524 if codec in ('avc1', 'avc2', 'avc3', 'avc4', 'vp9', 'vp8', 'hev1', 'hev2', 'h263', 'h264', 'mp4v', 'hvc1', 'av01'):
4f3c5e06 2525 if not vcodec:
2526 vcodec = full_codec
60f5c9fb 2527 elif codec in ('mp4a', 'opus', 'vorbis', 'mp3', 'aac', 'ac-3', 'ec-3', 'eac3', 'dtsc', 'dtse', 'dtsh', 'dtsl'):
4f3c5e06 2528 if not acodec:
2529 acodec = full_codec
2530 else:
60f5c9fb 2531 write_string('WARNING: Unknown codec %s\n' % full_codec, sys.stderr)
4f3c5e06 2532 if not vcodec and not acodec:
2533 if len(splited_codecs) == 2:
2534 return {
2535 'vcodec': vcodec,
2536 'acodec': acodec,
2537 }
2538 elif len(splited_codecs) == 1:
2539 return {
2540 'vcodec': 'none',
2541 'acodec': vcodec,
2542 }
2543 else:
2544 return {
2545 'vcodec': vcodec or 'none',
2546 'acodec': acodec or 'none',
2547 }
2548 return {}
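# Illustrative values:
#   parse_codecs('avc1.64001f, mp4a.40.2')
#   # -> {'vcodec': 'avc1.64001f', 'acodec': 'mp4a.40.2'}
#   parse_codecs('opus')
#   # -> {'vcodec': 'none', 'acodec': 'opus'}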
2549
2550
2ccd1b10 2551def urlhandle_detect_ext(url_handle):
79298173 2552 getheader = url_handle.headers.get
2ccd1b10 2553
b55ee18f
PH
2554 cd = getheader('Content-Disposition')
2555 if cd:
2556 m = re.match(r'attachment;\s*filename="(?P<filename>[^"]+)"', cd)
2557 if m:
2558 e = determine_ext(m.group('filename'), default_ext=None)
2559 if e:
2560 return e
2561
c460bdd5 2562 return mimetype2ext(getheader('Content-Type'))
05900629
PH
2563
2564
1e399778
YCH
2565def encode_data_uri(data, mime_type):
2566 return 'data:%s;base64,%s' % (mime_type, base64.b64encode(data).decode('ascii'))
2567
2568
05900629 2569def age_restricted(content_limit, age_limit):
6ec6cb4e 2570 """ Returns True iff the content should be blocked """
05900629
PH
2571
2572 if age_limit is None: # No limit set
2573 return False
2574 if content_limit is None:
2575 return False # Content available for everyone
2576 return age_limit < content_limit
61ca9a80
PH
2577
2578
2579def is_html(first_bytes):
2580 """ Detect whether a file contains HTML by examining its first bytes. """
2581
2582 BOMS = [
2583 (b'\xef\xbb\xbf', 'utf-8'),
2584 (b'\x00\x00\xfe\xff', 'utf-32-be'),
2585 (b'\xff\xfe\x00\x00', 'utf-32-le'),
2586 (b'\xff\xfe', 'utf-16-le'),
2587 (b'\xfe\xff', 'utf-16-be'),
2588 ]
2589 for bom, enc in BOMS:
2590 if first_bytes.startswith(bom):
2591 s = first_bytes[len(bom):].decode(enc, 'replace')
2592 break
2593 else:
2594 s = first_bytes.decode('utf-8', 'replace')
2595
2596 return re.match(r'^\s*<', s)
a055469f
PH
2597
2598
2599def determine_protocol(info_dict):
2600 protocol = info_dict.get('protocol')
2601 if protocol is not None:
2602 return protocol
2603
2604 url = info_dict['url']
2605 if url.startswith('rtmp'):
2606 return 'rtmp'
2607 elif url.startswith('mms'):
2608 return 'mms'
2609 elif url.startswith('rtsp'):
2610 return 'rtsp'
2611
2612 ext = determine_ext(url)
2613 if ext == 'm3u8':
2614 return 'm3u8'
2615 elif ext == 'f4m':
2616 return 'f4m'
2617
2618 return compat_urllib_parse_urlparse(url).scheme
cfb56d1a
PH
2619
2620
2621def render_table(header_row, data):
2622 """ Render a list of rows, each as a list of values """
2623 table = [header_row] + data
2624 max_lens = [max(len(compat_str(v)) for v in col) for col in zip(*table)]
2625 format_str = ' '.join('%-' + compat_str(ml + 1) + 's' for ml in max_lens[:-1]) + '%s'
2626 return '\n'.join(format_str % tuple(row) for row in table)
347de493
PH
2627
2628
2629def _match_one(filter_part, dct):
2630 COMPARISON_OPERATORS = {
2631 '<': operator.lt,
2632 '<=': operator.le,
2633 '>': operator.gt,
2634 '>=': operator.ge,
2635 '=': operator.eq,
2636 '!=': operator.ne,
2637 }
2638 operator_rex = re.compile(r'''(?x)\s*
2639 (?P<key>[a-z_]+)
2640 \s*(?P<op>%s)(?P<none_inclusive>\s*\?)?\s*
2641 (?:
2642 (?P<intval>[0-9.]+(?:[kKmMgGtTpPeEzZyY]i?[Bb]?)?)|
db13c16e 2643 (?P<quote>["\'])(?P<quotedstrval>(?:\\.|(?!(?P=quote)|\\).)+?)(?P=quote)|
347de493
PH
2644 (?P<strval>(?![0-9.])[a-z0-9A-Z]*)
2645 )
2646 \s*$
2647 ''' % '|'.join(map(re.escape, COMPARISON_OPERATORS.keys())))
2648 m = operator_rex.search(filter_part)
2649 if m:
2650 op = COMPARISON_OPERATORS[m.group('op')]
e5a088dc 2651 actual_value = dct.get(m.group('key'))
db13c16e
S
2652 if (m.group('quotedstrval') is not None or
2653 m.group('strval') is not None or
e5a088dc
S
2654 # If the original field is a string and matching comparisonvalue is
2655 # a number we should respect the origin of the original field
2656 # and process comparison value as a string (see
067aa17e 2657 # https://github.com/ytdl-org/youtube-dl/issues/11082).
e5a088dc
S
2658 actual_value is not None and m.group('intval') is not None and
2659 isinstance(actual_value, compat_str)):
347de493
PH
2660 if m.group('op') not in ('=', '!='):
2661 raise ValueError(
2662 'Operator %s does not support string values!' % m.group('op'))
db13c16e
S
2663 comparison_value = m.group('quotedstrval') or m.group('strval') or m.group('intval')
2664 quote = m.group('quote')
2665 if quote is not None:
2666 comparison_value = comparison_value.replace(r'\%s' % quote, quote)
347de493
PH
2667 else:
2668 try:
2669 comparison_value = int(m.group('intval'))
2670 except ValueError:
2671 comparison_value = parse_filesize(m.group('intval'))
2672 if comparison_value is None:
2673 comparison_value = parse_filesize(m.group('intval') + 'B')
2674 if comparison_value is None:
2675 raise ValueError(
2676 'Invalid integer value %r in filter part %r' % (
2677 m.group('intval'), filter_part))
347de493
PH
2678 if actual_value is None:
2679 return m.group('none_inclusive')
2680 return op(actual_value, comparison_value)
2681
2682 UNARY_OPERATORS = {
1cc47c66
S
2683 '': lambda v: (v is True) if isinstance(v, bool) else (v is not None),
2684 '!': lambda v: (v is False) if isinstance(v, bool) else (v is None),
347de493
PH
2685 }
2686 operator_rex = re.compile(r'''(?x)\s*
2687 (?P<op>%s)\s*(?P<key>[a-z_]+)
2688 \s*$
2689 ''' % '|'.join(map(re.escape, UNARY_OPERATORS.keys())))
2690 m = operator_rex.search(filter_part)
2691 if m:
2692 op = UNARY_OPERATORS[m.group('op')]
2693 actual_value = dct.get(m.group('key'))
2694 return op(actual_value)
2695
2696 raise ValueError('Invalid filter part %r' % filter_part)
2697
2698
2699def match_str(filter_str, dct):
2700 """ Filter a dictionary with a simple string syntax. Returns True (=passes filter) or false """
2701
2702 return all(
2703 _match_one(filter_part, dct) for filter_part in filter_str.split('&'))
2704
2705
2706def match_filter_func(filter_str):
2707 def _match_func(info_dict):
2708 if match_str(filter_str, info_dict):
2709 return None
2710 else:
2711 video_title = info_dict.get('title', info_dict.get('id', 'video'))
2712 return '%s does not pass filter %s, skipping ..' % (video_title, filter_str)
2713 return _match_func
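# Illustrative values ('<?' makes a comparison pass when the field is missing):
#   match_str('like_count > 100 & dislike_count <? 50',
#             {'like_count': 190, 'dislike_count': 10})  # -> True
#   match_str('is_live', {'is_live': False})             # -> False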
91410c9b
PH
2714
2715
bf6427d2
YCH
2716def parse_dfxp_time_expr(time_expr):
2717 if not time_expr:
d631d5f9 2718 return
bf6427d2
YCH
2719
2720 mobj = re.match(r'^(?P<time_offset>\d+(?:\.\d+)?)s?$', time_expr)
2721 if mobj:
2722 return float(mobj.group('time_offset'))
2723
db2fe38b 2724 mobj = re.match(r'^(\d+):(\d\d):(\d\d(?:(?:\.|:)\d+)?)$', time_expr)
bf6427d2 2725 if mobj:
db2fe38b 2726 return 3600 * int(mobj.group(1)) + 60 * int(mobj.group(2)) + float(mobj.group(3).replace(':', '.'))
bf6427d2
YCH
2727
2728
c1c924ab
YCH
2729def srt_subtitles_timecode(seconds):
2730 return '%02d:%02d:%02d,%03d' % (seconds / 3600, (seconds % 3600) / 60, seconds % 60, (seconds % 1) * 1000)
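# Illustrative values:
#   parse_dfxp_time_expr('25.0s')       # -> 25.0
#   parse_dfxp_time_expr('00:01:30.5')  # -> 90.5
#   srt_subtitles_timecode(3661.5)      # -> '01:01:01,500'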
bf6427d2
YCH
2731
2732
2733def dfxp2srt(dfxp_data):
3869028f
YCH
2734 '''
2735 @param dfxp_data A bytes-like object containing DFXP data
2736 @returns A unicode object containing converted SRT data
2737 '''
5b995f71 2738 LEGACY_NAMESPACES = (
3869028f
YCH
2739 (b'http://www.w3.org/ns/ttml', [
2740 b'http://www.w3.org/2004/11/ttaf1',
2741 b'http://www.w3.org/2006/04/ttaf1',
2742 b'http://www.w3.org/2006/10/ttaf1',
5b995f71 2743 ]),
3869028f
YCH
2744 (b'http://www.w3.org/ns/ttml#styling', [
2745 b'http://www.w3.org/ns/ttml#style',
5b995f71
RA
2746 ]),
2747 )
2748
2749 SUPPORTED_STYLING = [
2750 'color',
2751 'fontFamily',
2752 'fontSize',
2753 'fontStyle',
2754 'fontWeight',
2755 'textDecoration'
2756 ]
2757
4e335771 2758 _x = functools.partial(xpath_with_ns, ns_map={
261f4730 2759 'xml': 'http://www.w3.org/XML/1998/namespace',
4e335771 2760 'ttml': 'http://www.w3.org/ns/ttml',
5b995f71 2761 'tts': 'http://www.w3.org/ns/ttml#styling',
4e335771 2762 })
bf6427d2 2763
5b995f71
RA
2764 styles = {}
2765 default_style = {}
2766
87de7069 2767 class TTMLPElementParser(object):
5b995f71
RA
2768 _out = ''
2769 _unclosed_elements = []
2770 _applied_styles = []
bf6427d2 2771
2b14cb56 2772 def start(self, tag, attrib):
5b995f71
RA
2773 if tag in (_x('ttml:br'), 'br'):
2774 self._out += '\n'
2775 else:
2776 unclosed_elements = []
2777 style = {}
2778 element_style_id = attrib.get('style')
2779 if default_style:
2780 style.update(default_style)
2781 if element_style_id:
2782 style.update(styles.get(element_style_id, {}))
2783 for prop in SUPPORTED_STYLING:
2784 prop_val = attrib.get(_x('tts:' + prop))
2785 if prop_val:
2786 style[prop] = prop_val
2787 if style:
2788 font = ''
2789 for k, v in sorted(style.items()):
2790 if self._applied_styles and self._applied_styles[-1].get(k) == v:
2791 continue
2792 if k == 'color':
2793 font += ' color="%s"' % v
2794 elif k == 'fontSize':
2795 font += ' size="%s"' % v
2796 elif k == 'fontFamily':
2797 font += ' face="%s"' % v
2798 elif k == 'fontWeight' and v == 'bold':
2799 self._out += '<b>'
2800 unclosed_elements.append('b')
2801 elif k == 'fontStyle' and v == 'italic':
2802 self._out += '<i>'
2803 unclosed_elements.append('i')
2804 elif k == 'textDecoration' and v == 'underline':
2805 self._out += '<u>'
2806 unclosed_elements.append('u')
2807 if font:
2808 self._out += '<font' + font + '>'
2809 unclosed_elements.append('font')
2810 applied_style = {}
2811 if self._applied_styles:
2812 applied_style.update(self._applied_styles[-1])
2813 applied_style.update(style)
2814 self._applied_styles.append(applied_style)
2815 self._unclosed_elements.append(unclosed_elements)
bf6427d2 2816
2b14cb56 2817 def end(self, tag):
5b995f71
RA
2818 if tag not in (_x('ttml:br'), 'br'):
2819 unclosed_elements = self._unclosed_elements.pop()
2820 for element in reversed(unclosed_elements):
2821 self._out += '</%s>' % element
2822 if unclosed_elements and self._applied_styles:
2823 self._applied_styles.pop()
bf6427d2 2824
2b14cb56 2825 def data(self, data):
5b995f71 2826 self._out += data
2b14cb56 2827
2828 def close(self):
5b995f71 2829 return self._out.strip()
2b14cb56 2830
2831 def parse_node(node):
2832 target = TTMLPElementParser()
2833 parser = xml.etree.ElementTree.XMLParser(target=target)
2834 parser.feed(xml.etree.ElementTree.tostring(node))
2835 return parser.close()
bf6427d2 2836
5b995f71
RA
2837 for k, v in LEGACY_NAMESPACES:
2838 for ns in v:
2839 dfxp_data = dfxp_data.replace(ns, k)
2840
3869028f 2841 dfxp = compat_etree_fromstring(dfxp_data)
bf6427d2 2842 out = []
5b995f71 2843 paras = dfxp.findall(_x('.//ttml:p')) or dfxp.findall('.//p')
1b0427e6
YCH
2844
2845 if not paras:
2846 raise ValueError('Invalid dfxp/TTML subtitle')
bf6427d2 2847
5b995f71
RA
2848 repeat = False
2849 while True:
2850 for style in dfxp.findall(_x('.//ttml:style')):
261f4730
RA
2851 style_id = style.get('id') or style.get(_x('xml:id'))
2852 if not style_id:
2853 continue
5b995f71
RA
2854 parent_style_id = style.get('style')
2855 if parent_style_id:
2856 if parent_style_id not in styles:
2857 repeat = True
2858 continue
2859 styles[style_id] = styles[parent_style_id].copy()
2860 for prop in SUPPORTED_STYLING:
2861 prop_val = style.get(_x('tts:' + prop))
2862 if prop_val:
2863 styles.setdefault(style_id, {})[prop] = prop_val
2864 if repeat:
2865 repeat = False
2866 else:
2867 break
2868
2869 for p in ('body', 'div'):
2870 ele = xpath_element(dfxp, [_x('.//ttml:' + p), './/' + p])
2871 if ele is None:
2872 continue
2873 style = styles.get(ele.get('style'))
2874 if not style:
2875 continue
2876 default_style.update(style)
2877
bf6427d2 2878 for para, index in zip(paras, itertools.count(1)):
d631d5f9 2879 begin_time = parse_dfxp_time_expr(para.attrib.get('begin'))
7dff0363 2880 end_time = parse_dfxp_time_expr(para.attrib.get('end'))
d631d5f9
YCH
2881 dur = parse_dfxp_time_expr(para.attrib.get('dur'))
2882 if begin_time is None:
2883 continue
7dff0363 2884 if not end_time:
d631d5f9
YCH
2885 if not dur:
2886 continue
2887 end_time = begin_time + dur
bf6427d2
YCH
2888 out.append('%d\n%s --> %s\n%s\n\n' % (
2889 index,
c1c924ab
YCH
2890 srt_subtitles_timecode(begin_time),
2891 srt_subtitles_timecode(end_time),
bf6427d2
YCH
2892 parse_node(para)))
2893
2894 return ''.join(out)
2895
2896
66e289ba
S
2897def cli_option(params, command_option, param):
2898 param = params.get(param)
98e698f1
RA
2899 if param:
2900 param = compat_str(param)
66e289ba
S
2901 return [command_option, param] if param is not None else []
2902
2903
2904def cli_bool_option(params, command_option, param, true_value='true', false_value='false', separator=None):
2905 param = params.get(param)
5b232f46
S
2906 if param is None:
2907 return []
66e289ba
S
2908 assert isinstance(param, bool)
2909 if separator:
2910 return [command_option + separator + (true_value if param else false_value)]
2911 return [command_option, true_value if param else false_value]
2912
2913
2914def cli_valueless_option(params, command_option, param, expected_value=True):
2915 param = params.get(param)
2916 return [command_option] if param == expected_value else []
2917
2918
2919def cli_configuration_args(params, param, default=[]):
2920 ex_args = params.get(param)
2921 if ex_args is None:
2922 return default
2923 assert isinstance(ex_args, list)
2924 return ex_args
2925
2926
39672624
YCH
2927class ISO639Utils(object):
2928 # See http://www.loc.gov/standards/iso639-2/ISO-639-2_utf-8.txt
2929 _lang_map = {
2930 'aa': 'aar',
2931 'ab': 'abk',
2932 'ae': 'ave',
2933 'af': 'afr',
2934 'ak': 'aka',
2935 'am': 'amh',
2936 'an': 'arg',
2937 'ar': 'ara',
2938 'as': 'asm',
2939 'av': 'ava',
2940 'ay': 'aym',
2941 'az': 'aze',
2942 'ba': 'bak',
2943 'be': 'bel',
2944 'bg': 'bul',
2945 'bh': 'bih',
2946 'bi': 'bis',
2947 'bm': 'bam',
2948 'bn': 'ben',
2949 'bo': 'bod',
2950 'br': 'bre',
2951 'bs': 'bos',
2952 'ca': 'cat',
2953 'ce': 'che',
2954 'ch': 'cha',
2955 'co': 'cos',
2956 'cr': 'cre',
2957 'cs': 'ces',
2958 'cu': 'chu',
2959 'cv': 'chv',
2960 'cy': 'cym',
2961 'da': 'dan',
2962 'de': 'deu',
2963 'dv': 'div',
2964 'dz': 'dzo',
2965 'ee': 'ewe',
2966 'el': 'ell',
2967 'en': 'eng',
2968 'eo': 'epo',
2969 'es': 'spa',
2970 'et': 'est',
2971 'eu': 'eus',
2972 'fa': 'fas',
2973 'ff': 'ful',
2974 'fi': 'fin',
2975 'fj': 'fij',
2976 'fo': 'fao',
2977 'fr': 'fra',
2978 'fy': 'fry',
2979 'ga': 'gle',
2980 'gd': 'gla',
2981 'gl': 'glg',
2982 'gn': 'grn',
2983 'gu': 'guj',
2984 'gv': 'glv',
2985 'ha': 'hau',
2986 'he': 'heb',
b7acc835 2987 'iw': 'heb', # Replaced by he in 1989 revision
39672624
YCH
2988 'hi': 'hin',
2989 'ho': 'hmo',
2990 'hr': 'hrv',
2991 'ht': 'hat',
2992 'hu': 'hun',
2993 'hy': 'hye',
2994 'hz': 'her',
2995 'ia': 'ina',
2996 'id': 'ind',
b7acc835 2997 'in': 'ind', # Replaced by id in 1989 revision
39672624
YCH
2998 'ie': 'ile',
2999 'ig': 'ibo',
3000 'ii': 'iii',
3001 'ik': 'ipk',
3002 'io': 'ido',
3003 'is': 'isl',
3004 'it': 'ita',
3005 'iu': 'iku',
3006 'ja': 'jpn',
3007 'jv': 'jav',
3008 'ka': 'kat',
3009 'kg': 'kon',
3010 'ki': 'kik',
3011 'kj': 'kua',
3012 'kk': 'kaz',
3013 'kl': 'kal',
3014 'km': 'khm',
3015 'kn': 'kan',
3016 'ko': 'kor',
3017 'kr': 'kau',
3018 'ks': 'kas',
3019 'ku': 'kur',
3020 'kv': 'kom',
3021 'kw': 'cor',
3022 'ky': 'kir',
3023 'la': 'lat',
3024 'lb': 'ltz',
3025 'lg': 'lug',
3026 'li': 'lim',
3027 'ln': 'lin',
3028 'lo': 'lao',
3029 'lt': 'lit',
3030 'lu': 'lub',
3031 'lv': 'lav',
3032 'mg': 'mlg',
3033 'mh': 'mah',
3034 'mi': 'mri',
3035 'mk': 'mkd',
3036 'ml': 'mal',
3037 'mn': 'mon',
3038 'mr': 'mar',
3039 'ms': 'msa',
3040 'mt': 'mlt',
3041 'my': 'mya',
3042 'na': 'nau',
3043 'nb': 'nob',
3044 'nd': 'nde',
3045 'ne': 'nep',
3046 'ng': 'ndo',
3047 'nl': 'nld',
3048 'nn': 'nno',
3049 'no': 'nor',
3050 'nr': 'nbl',
3051 'nv': 'nav',
3052 'ny': 'nya',
3053 'oc': 'oci',
3054 'oj': 'oji',
3055 'om': 'orm',
3056 'or': 'ori',
3057 'os': 'oss',
3058 'pa': 'pan',
3059 'pi': 'pli',
3060 'pl': 'pol',
3061 'ps': 'pus',
3062 'pt': 'por',
3063 'qu': 'que',
3064 'rm': 'roh',
3065 'rn': 'run',
3066 'ro': 'ron',
3067 'ru': 'rus',
3068 'rw': 'kin',
3069 'sa': 'san',
3070 'sc': 'srd',
3071 'sd': 'snd',
3072 'se': 'sme',
3073 'sg': 'sag',
3074 'si': 'sin',
3075 'sk': 'slk',
3076 'sl': 'slv',
3077 'sm': 'smo',
3078 'sn': 'sna',
3079 'so': 'som',
3080 'sq': 'sqi',
3081 'sr': 'srp',
3082 'ss': 'ssw',
3083 'st': 'sot',
3084 'su': 'sun',
3085 'sv': 'swe',
3086 'sw': 'swa',
3087 'ta': 'tam',
3088 'te': 'tel',
3089 'tg': 'tgk',
3090 'th': 'tha',
3091 'ti': 'tir',
3092 'tk': 'tuk',
3093 'tl': 'tgl',
3094 'tn': 'tsn',
3095 'to': 'ton',
3096 'tr': 'tur',
3097 'ts': 'tso',
3098 'tt': 'tat',
3099 'tw': 'twi',
3100 'ty': 'tah',
3101 'ug': 'uig',
3102 'uk': 'ukr',
3103 'ur': 'urd',
3104 'uz': 'uzb',
3105 've': 'ven',
3106 'vi': 'vie',
3107 'vo': 'vol',
3108 'wa': 'wln',
3109 'wo': 'wol',
3110 'xh': 'xho',
3111 'yi': 'yid',
e9a50fba 3112 'ji': 'yid', # Replaced by yi in 1989 revision
39672624
YCH
3113 'yo': 'yor',
3114 'za': 'zha',
3115 'zh': 'zho',
3116 'zu': 'zul',
3117 }
3118
3119 @classmethod
3120 def short2long(cls, code):
3121 """Convert language code from ISO 639-1 to ISO 639-2/T"""
3122 return cls._lang_map.get(code[:2])
3123
3124 @classmethod
3125 def long2short(cls, code):
3126 """Convert language code from ISO 639-2/T to ISO 639-1"""
3127 for short_name, long_name in cls._lang_map.items():
3128 if long_name == code:
3129 return short_name
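# Illustrative values:
#   ISO639Utils.short2long('en')   # -> 'eng'
#   ISO639Utils.long2short('deu')  # -> 'de'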
3130
3131
4eb10f66
YCH
3132class ISO3166Utils(object):
3133 # From http://data.okfn.org/data/core/country-list
3134 _country_map = {
3135 'AF': 'Afghanistan',
3136 'AX': 'Åland Islands',
3137 'AL': 'Albania',
3138 'DZ': 'Algeria',
3139 'AS': 'American Samoa',
3140 'AD': 'Andorra',
3141 'AO': 'Angola',
3142 'AI': 'Anguilla',
3143 'AQ': 'Antarctica',
3144 'AG': 'Antigua and Barbuda',
3145 'AR': 'Argentina',
3146 'AM': 'Armenia',
3147 'AW': 'Aruba',
3148 'AU': 'Australia',
3149 'AT': 'Austria',
3150 'AZ': 'Azerbaijan',
3151 'BS': 'Bahamas',
3152 'BH': 'Bahrain',
3153 'BD': 'Bangladesh',
3154 'BB': 'Barbados',
3155 'BY': 'Belarus',
3156 'BE': 'Belgium',
3157 'BZ': 'Belize',
3158 'BJ': 'Benin',
3159 'BM': 'Bermuda',
3160 'BT': 'Bhutan',
3161 'BO': 'Bolivia, Plurinational State of',
3162 'BQ': 'Bonaire, Sint Eustatius and Saba',
3163 'BA': 'Bosnia and Herzegovina',
3164 'BW': 'Botswana',
3165 'BV': 'Bouvet Island',
3166 'BR': 'Brazil',
3167 'IO': 'British Indian Ocean Territory',
3168 'BN': 'Brunei Darussalam',
3169 'BG': 'Bulgaria',
3170 'BF': 'Burkina Faso',
3171 'BI': 'Burundi',
3172 'KH': 'Cambodia',
3173 'CM': 'Cameroon',
3174 'CA': 'Canada',
3175 'CV': 'Cape Verde',
3176 'KY': 'Cayman Islands',
3177 'CF': 'Central African Republic',
3178 'TD': 'Chad',
3179 'CL': 'Chile',
3180 'CN': 'China',
3181 'CX': 'Christmas Island',
3182 'CC': 'Cocos (Keeling) Islands',
3183 'CO': 'Colombia',
3184 'KM': 'Comoros',
3185 'CG': 'Congo',
3186 'CD': 'Congo, the Democratic Republic of the',
3187 'CK': 'Cook Islands',
3188 'CR': 'Costa Rica',
3189 'CI': 'Côte d\'Ivoire',
3190 'HR': 'Croatia',
3191 'CU': 'Cuba',
3192 'CW': 'Curaçao',
3193 'CY': 'Cyprus',
3194 'CZ': 'Czech Republic',
3195 'DK': 'Denmark',
3196 'DJ': 'Djibouti',
3197 'DM': 'Dominica',
3198 'DO': 'Dominican Republic',
3199 'EC': 'Ecuador',
3200 'EG': 'Egypt',
3201 'SV': 'El Salvador',
3202 'GQ': 'Equatorial Guinea',
3203 'ER': 'Eritrea',
3204 'EE': 'Estonia',
3205 'ET': 'Ethiopia',
3206 'FK': 'Falkland Islands (Malvinas)',
3207 'FO': 'Faroe Islands',
3208 'FJ': 'Fiji',
3209 'FI': 'Finland',
3210 'FR': 'France',
3211 'GF': 'French Guiana',
3212 'PF': 'French Polynesia',
3213 'TF': 'French Southern Territories',
3214 'GA': 'Gabon',
3215 'GM': 'Gambia',
3216 'GE': 'Georgia',
3217 'DE': 'Germany',
3218 'GH': 'Ghana',
3219 'GI': 'Gibraltar',
3220 'GR': 'Greece',
3221 'GL': 'Greenland',
3222 'GD': 'Grenada',
3223 'GP': 'Guadeloupe',
3224 'GU': 'Guam',
3225 'GT': 'Guatemala',
3226 'GG': 'Guernsey',
3227 'GN': 'Guinea',
3228 'GW': 'Guinea-Bissau',
3229 'GY': 'Guyana',
3230 'HT': 'Haiti',
3231 'HM': 'Heard Island and McDonald Islands',
3232 'VA': 'Holy See (Vatican City State)',
3233 'HN': 'Honduras',
3234 'HK': 'Hong Kong',
3235 'HU': 'Hungary',
3236 'IS': 'Iceland',
3237 'IN': 'India',
3238 'ID': 'Indonesia',
3239 'IR': 'Iran, Islamic Republic of',
3240 'IQ': 'Iraq',
3241 'IE': 'Ireland',
3242 'IM': 'Isle of Man',
3243 'IL': 'Israel',
3244 'IT': 'Italy',
3245 'JM': 'Jamaica',
3246 'JP': 'Japan',
3247 'JE': 'Jersey',
3248 'JO': 'Jordan',
3249 'KZ': 'Kazakhstan',
3250 'KE': 'Kenya',
3251 'KI': 'Kiribati',
3252 'KP': 'Korea, Democratic People\'s Republic of',
3253 'KR': 'Korea, Republic of',
3254 'KW': 'Kuwait',
3255 'KG': 'Kyrgyzstan',
3256 'LA': 'Lao People\'s Democratic Republic',
3257 'LV': 'Latvia',
3258 'LB': 'Lebanon',
3259 'LS': 'Lesotho',
3260 'LR': 'Liberia',
3261 'LY': 'Libya',
3262 'LI': 'Liechtenstein',
3263 'LT': 'Lithuania',
3264 'LU': 'Luxembourg',
3265 'MO': 'Macao',
3266 'MK': 'Macedonia, the Former Yugoslav Republic of',
3267 'MG': 'Madagascar',
3268 'MW': 'Malawi',
3269 'MY': 'Malaysia',
3270 'MV': 'Maldives',
3271 'ML': 'Mali',
3272 'MT': 'Malta',
3273 'MH': 'Marshall Islands',
3274 'MQ': 'Martinique',
3275 'MR': 'Mauritania',
3276 'MU': 'Mauritius',
3277 'YT': 'Mayotte',
3278 'MX': 'Mexico',
3279 'FM': 'Micronesia, Federated States of',
3280 'MD': 'Moldova, Republic of',
3281 'MC': 'Monaco',
3282 'MN': 'Mongolia',
3283 'ME': 'Montenegro',
3284 'MS': 'Montserrat',
3285 'MA': 'Morocco',
3286 'MZ': 'Mozambique',
3287 'MM': 'Myanmar',
3288 'NA': 'Namibia',
3289 'NR': 'Nauru',
3290 'NP': 'Nepal',
3291 'NL': 'Netherlands',
3292 'NC': 'New Caledonia',
3293 'NZ': 'New Zealand',
3294 'NI': 'Nicaragua',
3295 'NE': 'Niger',
3296 'NG': 'Nigeria',
3297 'NU': 'Niue',
3298 'NF': 'Norfolk Island',
3299 'MP': 'Northern Mariana Islands',
3300 'NO': 'Norway',
3301 'OM': 'Oman',
3302 'PK': 'Pakistan',
3303 'PW': 'Palau',
3304 'PS': 'Palestine, State of',
3305 'PA': 'Panama',
3306 'PG': 'Papua New Guinea',
3307 'PY': 'Paraguay',
3308 'PE': 'Peru',
3309 'PH': 'Philippines',
3310 'PN': 'Pitcairn',
3311 'PL': 'Poland',
3312 'PT': 'Portugal',
3313 'PR': 'Puerto Rico',
3314 'QA': 'Qatar',
3315 'RE': 'Réunion',
3316 'RO': 'Romania',
3317 'RU': 'Russian Federation',
3318 'RW': 'Rwanda',
3319 'BL': 'Saint Barthélemy',
3320 'SH': 'Saint Helena, Ascension and Tristan da Cunha',
3321 'KN': 'Saint Kitts and Nevis',
3322 'LC': 'Saint Lucia',
3323 'MF': 'Saint Martin (French part)',
3324 'PM': 'Saint Pierre and Miquelon',
3325 'VC': 'Saint Vincent and the Grenadines',
3326 'WS': 'Samoa',
3327 'SM': 'San Marino',
3328 'ST': 'Sao Tome and Principe',
3329 'SA': 'Saudi Arabia',
3330 'SN': 'Senegal',
3331 'RS': 'Serbia',
3332 'SC': 'Seychelles',
3333 'SL': 'Sierra Leone',
3334 'SG': 'Singapore',
3335 'SX': 'Sint Maarten (Dutch part)',
3336 'SK': 'Slovakia',
3337 'SI': 'Slovenia',
3338 'SB': 'Solomon Islands',
3339 'SO': 'Somalia',
3340 'ZA': 'South Africa',
3341 'GS': 'South Georgia and the South Sandwich Islands',
3342 'SS': 'South Sudan',
3343 'ES': 'Spain',
3344 'LK': 'Sri Lanka',
3345 'SD': 'Sudan',
3346 'SR': 'Suriname',
3347 'SJ': 'Svalbard and Jan Mayen',
3348 'SZ': 'Swaziland',
3349 'SE': 'Sweden',
3350 'CH': 'Switzerland',
3351 'SY': 'Syrian Arab Republic',
3352 'TW': 'Taiwan, Province of China',
3353 'TJ': 'Tajikistan',
3354 'TZ': 'Tanzania, United Republic of',
3355 'TH': 'Thailand',
3356 'TL': 'Timor-Leste',
3357 'TG': 'Togo',
3358 'TK': 'Tokelau',
3359 'TO': 'Tonga',
3360 'TT': 'Trinidad and Tobago',
3361 'TN': 'Tunisia',
3362 'TR': 'Turkey',
3363 'TM': 'Turkmenistan',
3364 'TC': 'Turks and Caicos Islands',
3365 'TV': 'Tuvalu',
3366 'UG': 'Uganda',
3367 'UA': 'Ukraine',
3368 'AE': 'United Arab Emirates',
3369 'GB': 'United Kingdom',
3370 'US': 'United States',
3371 'UM': 'United States Minor Outlying Islands',
3372 'UY': 'Uruguay',
3373 'UZ': 'Uzbekistan',
3374 'VU': 'Vanuatu',
3375 'VE': 'Venezuela, Bolivarian Republic of',
3376 'VN': 'Viet Nam',
3377 'VG': 'Virgin Islands, British',
3378 'VI': 'Virgin Islands, U.S.',
3379 'WF': 'Wallis and Futuna',
3380 'EH': 'Western Sahara',
3381 'YE': 'Yemen',
3382 'ZM': 'Zambia',
3383 'ZW': 'Zimbabwe',
3384 }
3385
3386 @classmethod
3387 def short2full(cls, code):
3388 """Convert an ISO 3166-2 country code to the corresponding full name"""
3389 return cls._country_map.get(code.upper())
3390
3391
773f291d
S
3392class GeoUtils(object):
3393 # Major IPv4 address blocks per country
3394 _country_ip_map = {
3395 'AD': '85.94.160.0/19',
3396 'AE': '94.200.0.0/13',
3397 'AF': '149.54.0.0/17',
3398 'AG': '209.59.64.0/18',
3399 'AI': '204.14.248.0/21',
3400 'AL': '46.99.0.0/16',
3401 'AM': '46.70.0.0/15',
3402 'AO': '105.168.0.0/13',
3403 'AP': '159.117.192.0/21',
3404 'AR': '181.0.0.0/12',
3405 'AS': '202.70.112.0/20',
3406 'AT': '84.112.0.0/13',
3407 'AU': '1.128.0.0/11',
3408 'AW': '181.41.0.0/18',
3409 'AZ': '5.191.0.0/16',
3410 'BA': '31.176.128.0/17',
3411 'BB': '65.48.128.0/17',
3412 'BD': '114.130.0.0/16',
3413 'BE': '57.0.0.0/8',
3414 'BF': '129.45.128.0/17',
3415 'BG': '95.42.0.0/15',
3416 'BH': '37.131.0.0/17',
3417 'BI': '154.117.192.0/18',
3418 'BJ': '137.255.0.0/16',
3419 'BL': '192.131.134.0/24',
3420 'BM': '196.12.64.0/18',
3421 'BN': '156.31.0.0/16',
3422 'BO': '161.56.0.0/16',
3423 'BQ': '161.0.80.0/20',
3424 'BR': '152.240.0.0/12',
3425 'BS': '24.51.64.0/18',
3426 'BT': '119.2.96.0/19',
3427 'BW': '168.167.0.0/16',
3428 'BY': '178.120.0.0/13',
3429 'BZ': '179.42.192.0/18',
3430 'CA': '99.224.0.0/11',
3431 'CD': '41.243.0.0/16',
3432 'CF': '196.32.200.0/21',
3433 'CG': '197.214.128.0/17',
3434 'CH': '85.0.0.0/13',
3435 'CI': '154.232.0.0/14',
3436 'CK': '202.65.32.0/19',
3437 'CL': '152.172.0.0/14',
3438 'CM': '165.210.0.0/15',
3439 'CN': '36.128.0.0/10',
3440 'CO': '181.240.0.0/12',
3441 'CR': '201.192.0.0/12',
3442 'CU': '152.206.0.0/15',
3443 'CV': '165.90.96.0/19',
3444 'CW': '190.88.128.0/17',
3445 'CY': '46.198.0.0/15',
3446 'CZ': '88.100.0.0/14',
3447 'DE': '53.0.0.0/8',
3448 'DJ': '197.241.0.0/17',
3449 'DK': '87.48.0.0/12',
3450 'DM': '192.243.48.0/20',
3451 'DO': '152.166.0.0/15',
3452 'DZ': '41.96.0.0/12',
3453 'EC': '186.68.0.0/15',
3454 'EE': '90.190.0.0/15',
3455 'EG': '156.160.0.0/11',
3456 'ER': '196.200.96.0/20',
3457 'ES': '88.0.0.0/11',
3458 'ET': '196.188.0.0/14',
3459 'EU': '2.16.0.0/13',
3460 'FI': '91.152.0.0/13',
3461 'FJ': '144.120.0.0/16',
3462 'FM': '119.252.112.0/20',
3463 'FO': '88.85.32.0/19',
3464 'FR': '90.0.0.0/9',
3465 'GA': '41.158.0.0/15',
3466 'GB': '25.0.0.0/8',
3467 'GD': '74.122.88.0/21',
3468 'GE': '31.146.0.0/16',
3469 'GF': '161.22.64.0/18',
3470 'GG': '62.68.160.0/19',
3471 'GH': '45.208.0.0/14',
3472 'GI': '85.115.128.0/19',
3473 'GL': '88.83.0.0/19',
3474 'GM': '160.182.0.0/15',
3475 'GN': '197.149.192.0/18',
3476 'GP': '104.250.0.0/19',
3477 'GQ': '105.235.224.0/20',
3478 'GR': '94.64.0.0/13',
3479 'GT': '168.234.0.0/16',
3480 'GU': '168.123.0.0/16',
3481 'GW': '197.214.80.0/20',
3482 'GY': '181.41.64.0/18',
3483 'HK': '113.252.0.0/14',
3484 'HN': '181.210.0.0/16',
3485 'HR': '93.136.0.0/13',
3486 'HT': '148.102.128.0/17',
3487 'HU': '84.0.0.0/14',
3488 'ID': '39.192.0.0/10',
3489 'IE': '87.32.0.0/12',
3490 'IL': '79.176.0.0/13',
3491 'IM': '5.62.80.0/20',
3492 'IN': '117.192.0.0/10',
3493 'IO': '203.83.48.0/21',
3494 'IQ': '37.236.0.0/14',
3495 'IR': '2.176.0.0/12',
3496 'IS': '82.221.0.0/16',
3497 'IT': '79.0.0.0/10',
3498 'JE': '87.244.64.0/18',
3499 'JM': '72.27.0.0/17',
3500 'JO': '176.29.0.0/16',
3501 'JP': '126.0.0.0/8',
3502 'KE': '105.48.0.0/12',
3503 'KG': '158.181.128.0/17',
3504 'KH': '36.37.128.0/17',
3505 'KI': '103.25.140.0/22',
3506 'KM': '197.255.224.0/20',
3507 'KN': '198.32.32.0/19',
3508 'KP': '175.45.176.0/22',
3509 'KR': '175.192.0.0/10',
3510 'KW': '37.36.0.0/14',
3511 'KY': '64.96.0.0/15',
3512 'KZ': '2.72.0.0/13',
3513 'LA': '115.84.64.0/18',
3514 'LB': '178.135.0.0/16',
3515 'LC': '192.147.231.0/24',
3516 'LI': '82.117.0.0/19',
3517 'LK': '112.134.0.0/15',
3518 'LR': '41.86.0.0/19',
3519 'LS': '129.232.0.0/17',
3520 'LT': '78.56.0.0/13',
3521 'LU': '188.42.0.0/16',
3522 'LV': '46.109.0.0/16',
3523 'LY': '41.252.0.0/14',
3524 'MA': '105.128.0.0/11',
3525 'MC': '88.209.64.0/18',
3526 'MD': '37.246.0.0/16',
3527 'ME': '178.175.0.0/17',
3528 'MF': '74.112.232.0/21',
3529 'MG': '154.126.0.0/17',
3530 'MH': '117.103.88.0/21',
3531 'MK': '77.28.0.0/15',
3532 'ML': '154.118.128.0/18',
3533 'MM': '37.111.0.0/17',
3534 'MN': '49.0.128.0/17',
3535 'MO': '60.246.0.0/16',
3536 'MP': '202.88.64.0/20',
3537 'MQ': '109.203.224.0/19',
3538 'MR': '41.188.64.0/18',
3539 'MS': '208.90.112.0/22',
3540 'MT': '46.11.0.0/16',
3541 'MU': '105.16.0.0/12',
3542 'MV': '27.114.128.0/18',
3543 'MW': '105.234.0.0/16',
3544 'MX': '187.192.0.0/11',
3545 'MY': '175.136.0.0/13',
3546 'MZ': '197.218.0.0/15',
3547 'NA': '41.182.0.0/16',
3548 'NC': '101.101.0.0/18',
3549 'NE': '197.214.0.0/18',
3550 'NF': '203.17.240.0/22',
3551 'NG': '105.112.0.0/12',
3552 'NI': '186.76.0.0/15',
3553 'NL': '145.96.0.0/11',
3554 'NO': '84.208.0.0/13',
3555 'NP': '36.252.0.0/15',
3556 'NR': '203.98.224.0/19',
3557 'NU': '49.156.48.0/22',
3558 'NZ': '49.224.0.0/14',
3559 'OM': '5.36.0.0/15',
3560 'PA': '186.72.0.0/15',
3561 'PE': '186.160.0.0/14',
3562 'PF': '123.50.64.0/18',
3563 'PG': '124.240.192.0/19',
3564 'PH': '49.144.0.0/13',
3565 'PK': '39.32.0.0/11',
3566 'PL': '83.0.0.0/11',
3567 'PM': '70.36.0.0/20',
3568 'PR': '66.50.0.0/16',
3569 'PS': '188.161.0.0/16',
3570 'PT': '85.240.0.0/13',
3571 'PW': '202.124.224.0/20',
3572 'PY': '181.120.0.0/14',
3573 'QA': '37.210.0.0/15',
3574 'RE': '139.26.0.0/16',
3575 'RO': '79.112.0.0/13',
3576 'RS': '178.220.0.0/14',
3577 'RU': '5.136.0.0/13',
3578 'RW': '105.178.0.0/15',
3579 'SA': '188.48.0.0/13',
3580 'SB': '202.1.160.0/19',
3581 'SC': '154.192.0.0/11',
3582 'SD': '154.96.0.0/13',
3583 'SE': '78.64.0.0/12',
3584 'SG': '152.56.0.0/14',
3585 'SI': '188.196.0.0/14',
3586 'SK': '78.98.0.0/15',
3587 'SL': '197.215.0.0/17',
3588 'SM': '89.186.32.0/19',
3589 'SN': '41.82.0.0/15',
3590 'SO': '197.220.64.0/19',
3591 'SR': '186.179.128.0/17',
3592 'SS': '105.235.208.0/21',
3593 'ST': '197.159.160.0/19',
3594 'SV': '168.243.0.0/16',
3595 'SX': '190.102.0.0/20',
3596 'SY': '5.0.0.0/16',
3597 'SZ': '41.84.224.0/19',
3598 'TC': '65.255.48.0/20',
3599 'TD': '154.68.128.0/19',
3600 'TG': '196.168.0.0/14',
3601 'TH': '171.96.0.0/13',
3602 'TJ': '85.9.128.0/18',
3603 'TK': '27.96.24.0/21',
3604 'TL': '180.189.160.0/20',
3605 'TM': '95.85.96.0/19',
3606 'TN': '197.0.0.0/11',
3607 'TO': '175.176.144.0/21',
3608 'TR': '78.160.0.0/11',
3609 'TT': '186.44.0.0/15',
3610 'TV': '202.2.96.0/19',
3611 'TW': '120.96.0.0/11',
3612 'TZ': '156.156.0.0/14',
3613 'UA': '93.72.0.0/13',
3614 'UG': '154.224.0.0/13',
3615 'US': '3.0.0.0/8',
3616 'UY': '167.56.0.0/13',
3617 'UZ': '82.215.64.0/18',
3618 'VA': '212.77.0.0/19',
3619 'VC': '24.92.144.0/20',
3620 'VE': '186.88.0.0/13',
3621 'VG': '172.103.64.0/18',
3622 'VI': '146.226.0.0/16',
3623 'VN': '14.160.0.0/11',
3624 'VU': '202.80.32.0/20',
3625 'WF': '117.20.32.0/21',
3626 'WS': '202.4.32.0/19',
3627 'YE': '134.35.0.0/16',
3628 'YT': '41.242.116.0/22',
3629 'ZA': '41.0.0.0/11',
3630 'ZM': '165.56.0.0/13',
3631 'ZW': '41.85.192.0/19',
3632 }
3633
3634 @classmethod
5f95927a
S
3635 def random_ipv4(cls, code_or_block):
3636 if len(code_or_block) == 2:
3637 block = cls._country_ip_map.get(code_or_block.upper())
3638 if not block:
3639 return None
3640 else:
3641 block = code_or_block
773f291d
S
3642 addr, preflen = block.split('/')
3643 addr_min = compat_struct_unpack('!L', socket.inet_aton(addr))[0]
3644 addr_max = addr_min | (0xffffffff >> int(preflen))
18a0defa 3645 return compat_str(socket.inet_ntoa(
4248dad9 3646 compat_struct_pack('!L', random.randint(addr_min, addr_max))))
773f291d
S
3647
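# How the address range is derived (illustrative notes, not part of the original
# code): for a block such as '212.77.0.0/19' (the 'VA' entry above), addr_min is
# the network address as a 32-bit integer and `0xffffffff >> 19` sets the 13 host
# bits, so addr_max corresponds to 212.77.31.255.  A hedged usage sketch:
#
#     >>> GeoUtils.random_ipv4('VA')          # random address inside 212.77.0.0/19
#     '212.77.12.34'                          # example output, non-deterministic
#     >>> GeoUtils.random_ipv4('10.0.0.0/8')  # raw CIDR blocks are accepted as well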
3648
91410c9b 3649class PerRequestProxyHandler(compat_urllib_request.ProxyHandler):
2461f79d
PH
3650 def __init__(self, proxies=None):
3651 # Set default handlers
3652 for type in ('http', 'https'):
3653 setattr(self, '%s_open' % type,
3654 lambda r, proxy='__noproxy__', type=type, meth=self.proxy_open:
3655 meth(r, proxy, type))
38e87f6c 3656 compat_urllib_request.ProxyHandler.__init__(self, proxies)
2461f79d 3657
91410c9b 3658 def proxy_open(self, req, proxy, type):
2461f79d 3659 req_proxy = req.headers.get('Ytdl-request-proxy')
91410c9b
PH
3660 if req_proxy is not None:
3661 proxy = req_proxy
2461f79d
PH
3662 del req.headers['Ytdl-request-proxy']
3663
3664 if proxy == '__noproxy__':
3665 return None # No Proxy
51fb4995 3666 if compat_urlparse.urlparse(proxy).scheme.lower() in ('socks', 'socks4', 'socks4a', 'socks5'):
71aff188
YCH
3667 req.add_header('Ytdl-socks-proxy', proxy)
3668 # youtube-dl's http/https handlers handle wrapping the socket with SOCKS
3669 return None
91410c9b
PH
3670 return compat_urllib_request.ProxyHandler.proxy_open(
3671 self, req, proxy, type)
5bc880b9
YCH
3672
3673
0a5445dd
YCH
3674# Both long_to_bytes and bytes_to_long are adapted from PyCrypto, which is
3675# released into Public Domain
3676# https://github.com/dlitz/pycrypto/blob/master/lib/Crypto/Util/number.py#L387
3677
3678def long_to_bytes(n, blocksize=0):
3679 """long_to_bytes(n:long, blocksize:int) : string
3680 Convert a long integer to a byte string.
3681
3682 If optional blocksize is given and greater than zero, pad the front of the
3683 byte string with binary zeros so that the length is a multiple of
3684 blocksize.
3685 """
3686 # after much testing, this algorithm was deemed to be the fastest
3687 s = b''
3688 n = int(n)
3689 while n > 0:
3690 s = compat_struct_pack('>I', n & 0xffffffff) + s
3691 n = n >> 32
3692 # strip off leading zeros
3693 for i in range(len(s)):
3694 if s[i] != b'\000'[0]:
3695 break
3696 else:
3697 # only happens when n == 0
3698 s = b'\000'
3699 i = 0
3700 s = s[i:]
3701 # add back some pad bytes. this could be done more efficiently w.r.t. the
3702 # de-padding being done above, but sigh...
3703 if blocksize > 0 and len(s) % blocksize:
3704 s = (blocksize - len(s) % blocksize) * b'\000' + s
3705 return s
3706
3707
3708def bytes_to_long(s):
3709 """bytes_to_long(string) : long
3710 Convert a byte string to a long integer.
3711
3712 This is (essentially) the inverse of long_to_bytes().
3713 """
3714 acc = 0
3715 length = len(s)
3716 if length % 4:
3717 extra = (4 - length % 4)
3718 s = b'\000' * extra + s
3719 length = length + extra
3720 for i in range(0, length, 4):
3721 acc = (acc << 32) + compat_struct_unpack('>I', s[i:i + 4])[0]
3722 return acc
3723
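# Hedged round-trip sketch for the two helpers above (values chosen for illustration):
#
#     >>> long_to_bytes(65537)
#     b'\x01\x00\x01'
#     >>> long_to_bytes(65537, blocksize=4)    # front-padded to a multiple of 4
#     b'\x00\x01\x00\x01'
#     >>> bytes_to_long(b'\x01\x00\x01')
#     65537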
3724
5bc880b9
YCH
3725def ohdave_rsa_encrypt(data, exponent, modulus):
3726 '''
3727 Implement OHDave's RSA algorithm. See http://www.ohdave.com/rsa/
3728
3729 Input:
3730 data: data to encrypt, bytes-like object
3731 exponent, modulus: parameter e and N of RSA algorithm, both integer
3732 Output: hex string of encrypted data
3733
3734 Limitation: supports one block encryption only
3735 '''
3736
3737 payload = int(binascii.hexlify(data[::-1]), 16)
3738 encrypted = pow(payload, exponent, modulus)
3739 return '%x' % encrypted
81bdc8fd
YCH
3740
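# A hedged sketch with toy RSA parameters (real callers pass the site's public
# exponent e and modulus N; the tiny values below are only for illustration):
#
#     >>> ohdave_rsa_encrypt(b'\x07', 3, 187)    # 7**3 mod 187 == 156 == 0x9c
#     '9c'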
3741
f48409c7
YCH
3742def pkcs1pad(data, length):
3743 """
3744 Pad input data with the PKCS#1 v1.5 scheme
3745
3746 @param {int[]} data input data
3747 @param {int} length target length
3748 @returns {int[]} padded data
3749 """
3750 if len(data) > length - 11:
3751 raise ValueError('Input data too long for PKCS#1 padding')
3752
3753 pseudo_random = [random.randint(1, 255) for _ in range(length - len(data) - 3)]  # padding octets must be nonzero per PKCS#1 v1.5
3754 return [0, 2] + pseudo_random + [0] + data
3755
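# Hedged example: padding a 3-byte message to a 16-byte block.  The result is
# [0, 2] + 10 pseudo-random padding bytes + [0] + data, the PKCS#1 v1.5 layout:
#
#     >>> padded = pkcs1pad([1, 2, 3], 16)
#     >>> len(padded), padded[:2], padded[-4:]
#     (16, [0, 2], [0, 1, 2, 3])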
3756
5eb6bdce 3757def encode_base_n(num, n, table=None):
59f898b7 3758 FULL_TABLE = '0123456789abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ'
59f898b7
YCH
3759 if not table:
3760 table = FULL_TABLE[:n]
3761
5eb6bdce
YCH
3762 if n > len(table):
3763 raise ValueError('base %d exceeds table length %d' % (n, len(table)))
3764
3765 if num == 0:
3766 return table[0]
3767
81bdc8fd
YCH
3768 ret = ''
3769 while num:
3770 ret = table[num % n] + ret
3771 num = num // n
3772 return ret
f52354a8
YCH
3773
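# Hedged examples for encode_base_n (the default table covers 0-9, a-z, A-Z):
#
#     >>> encode_base_n(255, 16)
#     'ff'
#     >>> encode_base_n(61, 62)
#     'Z'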
3774
3775def decode_packed_codes(code):
06b3fe29 3776 mobj = re.search(PACKED_CODES_RE, code)
f52354a8
YCH
3777 obfuscated_code, base, count, symbols = mobj.groups()
3778 base = int(base)
3779 count = int(count)
3780 symbols = symbols.split('|')
3781 symbol_table = {}
3782
3783 while count:
3784 count -= 1
5eb6bdce 3785 base_n_count = encode_base_n(count, base)
f52354a8
YCH
3786 symbol_table[base_n_count] = symbols[count] or base_n_count
3787
3788 return re.sub(
3789 r'\b(\w+)\b', lambda mobj: symbol_table[mobj.group(0)],
3790 obfuscated_code)
e154c651 3791
3792
3793def parse_m3u8_attributes(attrib):
3794 info = {}
3795 for (key, val) in re.findall(r'(?P<key>[A-Z0-9-]+)=(?P<val>"[^"]+"|[^",]+)(?:,|$)', attrib):
3796 if val.startswith('"'):
3797 val = val[1:-1]
3798 info[key] = val
3799 return info
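# Hedged example of the attribute parsing above (quoted values lose their quotes,
# and commas inside quoted values are preserved):
#
#     >>> parse_m3u8_attributes('BANDWIDTH=86000,CODECS="mp4a.40.2,avc1.4d401e"')
#     {'BANDWIDTH': '86000', 'CODECS': 'mp4a.40.2,avc1.4d401e'}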
1143535d
YCH
3800
3801
3802def urshift(val, n):
3803 return val >> n if val >= 0 else (val + 0x100000000) >> n
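# urshift emulates a 32-bit unsigned right shift (like JavaScript's >>> operator)
# for negative Python integers.  Hedged examples:
#
#     >>> urshift(-4, 1)      # (-4 + 0x100000000) >> 1
#     2147483646
#     >>> urshift(8, 1)
#     4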
d3f8e038
YCH
3804
3805
3806# Based on png2str() written by @gdkchan and improved by @yokrysty
067aa17e 3807# Originally posted at https://github.com/ytdl-org/youtube-dl/issues/9706
d3f8e038
YCH
3808def decode_png(png_data):
3809 # Reference: https://www.w3.org/TR/PNG/
3810 header = png_data[8:]
3811
3812 if png_data[:8] != b'\x89PNG\x0d\x0a\x1a\x0a' or header[4:8] != b'IHDR':
3813 raise IOError('Not a valid PNG file.')
3814
3815 int_map = {1: '>B', 2: '>H', 4: '>I'}
3816 unpack_integer = lambda x: compat_struct_unpack(int_map[len(x)], x)[0]
3817
3818 chunks = []
3819
3820 while header:
3821 length = unpack_integer(header[:4])
3822 header = header[4:]
3823
3824 chunk_type = header[:4]
3825 header = header[4:]
3826
3827 chunk_data = header[:length]
3828 header = header[length:]
3829
3830 header = header[4:] # Skip CRC
3831
3832 chunks.append({
3833 'type': chunk_type,
3834 'length': length,
3835 'data': chunk_data
3836 })
3837
3838 ihdr = chunks[0]['data']
3839
3840 width = unpack_integer(ihdr[:4])
3841 height = unpack_integer(ihdr[4:8])
3842
3843 idat = b''
3844
3845 for chunk in chunks:
3846 if chunk['type'] == b'IDAT':
3847 idat += chunk['data']
3848
3849 if not idat:
3850 raise IOError('Unable to read PNG data.')
3851
3852 decompressed_data = bytearray(zlib.decompress(idat))
3853
3854 stride = width * 3
3855 pixels = []
3856
3857 def _get_pixel(idx):
3858 x = idx % stride
3859 y = idx // stride
3860 return pixels[y][x]
3861
3862 for y in range(height):
3863 basePos = y * (1 + stride)
3864 filter_type = decompressed_data[basePos]
3865
3866 current_row = []
3867
3868 pixels.append(current_row)
3869
3870 for x in range(stride):
3871 color = decompressed_data[1 + basePos + x]
3872 basex = y * stride + x
3873 left = 0
3874 up = 0
3875
3876 if x > 2:
3877 left = _get_pixel(basex - 3)
3878 if y > 0:
3879 up = _get_pixel(basex - stride)
3880
3881 if filter_type == 1: # Sub
3882 color = (color + left) & 0xff
3883 elif filter_type == 2: # Up
3884 color = (color + up) & 0xff
3885 elif filter_type == 3: # Average
3886 color = (color + ((left + up) >> 1)) & 0xff
3887 elif filter_type == 4: # Paeth
3888 a = left
3889 b = up
3890 c = 0
3891
3892 if x > 2 and y > 0:
3893 c = _get_pixel(basex - stride - 3)
3894
3895 p = a + b - c
3896
3897 pa = abs(p - a)
3898 pb = abs(p - b)
3899 pc = abs(p - c)
3900
3901 if pa <= pb and pa <= pc:
3902 color = (color + a) & 0xff
3903 elif pb <= pc:
3904 color = (color + b) & 0xff
3905 else:
3906 color = (color + c) & 0xff
3907
3908 current_row.append(color)
3909
3910 return width, height, pixels
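# Hedged usage sketch: decode_png as written expects the raw bytes of a
# non-interlaced 8-bit RGB PNG (3 bytes per pixel, no alpha) and returns the
# dimensions plus one flat byte list per row ('frame.png' is a placeholder name):
#
#     >>> with open('frame.png', 'rb') as f:
#     ...     width, height, rows = decode_png(f.read())
#     >>> len(rows) == height and len(rows[0]) == width * 3
#     True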
efa97bdc
YCH
3911
3912
3913def write_xattr(path, key, value):
3914 # This mess below finds the best xattr tool for the job
3915 try:
3916 # try the pyxattr module...
3917 import xattr
3918
53a7e3d2
YCH
3919 if hasattr(xattr, 'set'): # pyxattr
3920 # Unicode arguments are not supported in python-pyxattr until
3921 # version 0.5.0
067aa17e 3922 # See https://github.com/ytdl-org/youtube-dl/issues/5498
53a7e3d2
YCH
3923 pyxattr_required_version = '0.5.0'
3924 if version_tuple(xattr.__version__) < version_tuple(pyxattr_required_version):
3925 # TODO: fallback to CLI tools
3926 raise XAttrUnavailableError(
3927 'python-pyxattr is detected but is too old. '
3928 'youtube-dl requires %s or above while your version is %s. '
3929 'Falling back to other xattr implementations' % (
3930 pyxattr_required_version, xattr.__version__))
3931
3932 setxattr = xattr.set
3933 else: # xattr
3934 setxattr = xattr.setxattr
efa97bdc
YCH
3935
3936 try:
53a7e3d2 3937 setxattr(path, key, value)
efa97bdc
YCH
3938 except EnvironmentError as e:
3939 raise XAttrMetadataError(e.errno, e.strerror)
3940
3941 except ImportError:
3942 if compat_os_name == 'nt':
3943 # Write xattrs to NTFS Alternate Data Streams:
3944 # http://en.wikipedia.org/wiki/NTFS#Alternate_data_streams_.28ADS.29
3945 assert ':' not in key
3946 assert os.path.exists(path)
3947
3948 ads_fn = path + ':' + key
3949 try:
3950 with open(ads_fn, 'wb') as f:
3951 f.write(value)
3952 except EnvironmentError as e:
3953 raise XAttrMetadataError(e.errno, e.strerror)
3954 else:
3955 user_has_setfattr = check_executable('setfattr', ['--version'])
3956 user_has_xattr = check_executable('xattr', ['-h'])
3957
3958 if user_has_setfattr or user_has_xattr:
3959
3960 value = value.decode('utf-8')
3961 if user_has_setfattr:
3962 executable = 'setfattr'
3963 opts = ['-n', key, '-v', value]
3964 elif user_has_xattr:
3965 executable = 'xattr'
3966 opts = ['-w', key, value]
3967
3968 cmd = ([encodeFilename(executable, True)] +
3969 [encodeArgument(o) for o in opts] +
3970 [encodeFilename(path, True)])
3971
3972 try:
3973 p = subprocess.Popen(
3974 cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE, stdin=subprocess.PIPE)
3975 except EnvironmentError as e:
3976 raise XAttrMetadataError(e.errno, e.strerror)
3977 stdout, stderr = p.communicate()
3978 stderr = stderr.decode('utf-8', 'replace')
3979 if p.returncode != 0:
3980 raise XAttrMetadataError(p.returncode, stderr)
3981
3982 else:
3983 # On Unix, but we could not find pyxattr, setfattr, or xattr.
3984 if sys.platform.startswith('linux'):
3985 raise XAttrUnavailableError(
3986 "Couldn't find a tool to set the xattrs. "
3987 "Install either the python 'pyxattr' or 'xattr' "
3988 "modules, or the GNU 'attr' package "
3989 "(which contains the 'setfattr' tool).")
3990 else:
3991 raise XAttrUnavailableError(
3992 "Couldn't find a tool to set the xattrs. "
3993 "Install either the python 'xattr' module, "
3994 "or the 'xattr' binary.")
0c265486
YCH
3995
3996
3997def random_birthday(year_field, month_field, day_field):
aa374bc7
AS
3998 start_date = datetime.date(1950, 1, 1)
3999 end_date = datetime.date(1995, 12, 31)
4000 offset = random.randint(0, (end_date - start_date).days)
4001 random_date = start_date + datetime.timedelta(offset)
0c265486 4002 return {
aa374bc7
AS
4003 year_field: str(random_date.year),
4004 month_field: str(random_date.month),
4005 day_field: str(random_date.day),
0c265486 4006 }
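# Hedged example: the field names are whatever the caller's form expects (the
# names below are illustrative); values are decimal strings for a random date
# between 1950-01-01 and 1995-12-31:
#
#     >>> random_birthday('birth_year', 'birth_month', 'birth_day')
#     {'birth_year': '1987', 'birth_month': '6', 'birth_day': '23'}   # example output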