# Extracted from the yt-dlp git repository: youtube_dlc/utils.py
1 #!/usr/bin/env python
2 # coding: utf-8
3
4 from __future__ import unicode_literals
5
6 import base64
7 import binascii
8 import calendar
9 import codecs
10 import collections
11 import contextlib
12 import ctypes
13 import datetime
14 import email.utils
15 import email.header
16 import errno
17 import functools
18 import gzip
19 import imp
20 import io
21 import itertools
22 import json
23 import locale
24 import math
25 import operator
26 import os
27 import platform
28 import random
29 import re
30 import socket
31 import ssl
32 import subprocess
33 import sys
34 import tempfile
35 import time
36 import traceback
37 import xml.etree.ElementTree
38 import zlib
39
40 from .compat import (
41 compat_HTMLParseError,
42 compat_HTMLParser,
43 compat_basestring,
44 compat_chr,
45 compat_cookiejar,
46 compat_ctypes_WINFUNCTYPE,
47 compat_etree_fromstring,
48 compat_expanduser,
49 compat_html_entities,
50 compat_html_entities_html5,
51 compat_http_client,
52 compat_integer_types,
53 compat_kwargs,
54 compat_os_name,
55 compat_parse_qs,
56 compat_shlex_quote,
57 compat_str,
58 compat_struct_pack,
59 compat_struct_unpack,
60 compat_urllib_error,
61 compat_urllib_parse,
62 compat_urllib_parse_urlencode,
63 compat_urllib_parse_urlparse,
64 compat_urllib_parse_urlunparse,
65 compat_urllib_parse_quote,
66 compat_urllib_parse_quote_plus,
67 compat_urllib_parse_unquote_plus,
68 compat_urllib_request,
69 compat_urlparse,
70 compat_xpath,
71 )
72
73 from .socks import (
74 ProxyType,
75 sockssocket,
76 )
77
78
def register_socks_protocols():
    """Teach urlparse that SOCKS schemes carry a network location.

    In Python < 2.6.5, urlsplit() suffers from bug
    https://bugs.python.org/issue7904: URLs whose scheme is missing from
    urlparse.uses_netloc are not split correctly, so the SOCKS proxy
    schemes are appended here ("registered") before any proxy URL parsing.
    """
    registered = compat_urlparse.uses_netloc
    for proxy_scheme in ('socks', 'socks4', 'socks4a', 'socks5'):
        if proxy_scheme in registered:
            continue
        registered.append(proxy_scheme)
86
87
# Type of a compiled regular expression pattern. This is not clearly
# defined otherwise across the supported Python versions, so it is derived
# from an actual compiled pattern; used for isinstance() checks where an
# argument may be either a string or a precompiled regex.
compiled_regex_type = type(re.compile(''))
90
91
def random_user_agent():
    """Return a plausible desktop-Chrome-on-Windows User-Agent string.

    A random Chrome build number is substituted into a fixed
    Windows 10 / x64 UA template so that repeated invocations do not all
    present the identical fingerprint.

    Returns:
        str: a complete User-Agent header value.
    """
    _USER_AGENT_TPL = 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/%s Safari/537.36'
    # A representative spread of real Chrome 68-76 build numbers.
    # NOTE: this used to be an exhaustive ~1600-entry list of every point
    # release; any valid version string serves equally well for UA
    # rotation, so the list is trimmed to one per release line to keep the
    # module maintainable. Callers only ever receive the formatted string.
    _CHROME_VERSIONS = (
        '76.0.3780.3',
        '76.0.3775.2',
        '76.0.3770.15',
        '75.0.3770.8',
        '75.0.3766.2',
        '75.0.3759.6',
        '75.0.3749.3',
        '74.0.3729.129',
        '74.0.3724.8',
        '74.0.3718.9',
        '74.0.3704.9',
        '73.0.3683.121',
        '73.0.3683.75',
        '73.0.3664.3',
        '72.0.3626.122',
        '72.0.3626.81',
        '72.0.3608.5',
        '71.0.3578.141',
        '71.0.3578.80',
        '70.0.3538.124',
        '70.0.3538.67',
        '69.0.3497.128',
        '69.0.3497.81',
        '68.0.3440.134',
    )
    return _USER_AGENT_TPL % random.choice(_CHROME_VERSIONS)
1673
1674
# Default HTTP headers attached to every outgoing request; the User-Agent is
# chosen at random from the Chrome version list above via random_user_agent().
std_headers = {
    'User-Agent': random_user_agent(),
    'Accept-Charset': 'ISO-8859-1,utf-8;q=0.7,*;q=0.7',
    'Accept': 'text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8',
    'Accept-Encoding': 'gzip, deflate',
    'Accept-Language': 'en-us,en;q=0.5',
}


# Named alternative User-Agent strings that callers can select explicitly.
USER_AGENTS = {
    'Safari': 'Mozilla/5.0 (X11; Linux x86_64; rv:10.0) AppleWebKit/533.20.25 (KHTML, like Gecko) Version/5.0.4 Safari/533.20.27',
}


# Unique sentinel distinguishing "no default supplied" from an explicit
# default of None (used by the xpath_* helpers below).
NO_DEFAULT = object()
1690
ENGLISH_MONTH_NAMES = [
    'January', 'February', 'March', 'April', 'May', 'June',
    'July', 'August', 'September', 'October', 'November', 'December']

# Full month names per language code, for parsing textual dates.
MONTH_NAMES = {
    'en': ENGLISH_MONTH_NAMES,
    'fr': [
        'janvier', 'février', 'mars', 'avril', 'mai', 'juin',
        'juillet', 'août', 'septembre', 'octobre', 'novembre', 'décembre'],
}

# Media file extensions recognized by the format/extension guessing helpers.
KNOWN_EXTENSIONS = (
    'mp4', 'm4a', 'm4p', 'm4b', 'm4r', 'm4v', 'aac',
    'flv', 'f4v', 'f4a', 'f4b',
    'webm', 'ogg', 'ogv', 'oga', 'ogx', 'spx', 'opus',
    'mkv', 'mka', 'mk3d',
    'avi', 'divx',
    'mov',
    'asf', 'wmv', 'wma',
    '3gp', '3g2',
    'mp3',
    'flac',
    'ape',
    'wav',
    'f4f', 'f4m', 'm3u8', 'smil')

# needed for sanitizing filenames in restricted mode
# Maps each accented character to its ASCII transliteration (single char or,
# for ligatures like Æ/ß, a multi-character string).
ACCENT_CHARS = dict(zip('ÂÃÄÀÁÅÆÇÈÉÊËÌÍÎÏÐÑÒÓÔÕÖŐØŒÙÚÛÜŰÝÞßàáâãäåæçèéêëìíîïðñòóôõöőøœùúûüűýþÿ',
                        itertools.chain('AAAAAA', ['AE'], 'CEEEEIIIIDNOOOOOOO', ['OE'], 'UUUUUY', ['TH', 'ss'],
                                        'aaaaaa', ['ae'], 'ceeeeiiiionooooooo', ['oe'], 'uuuuuy', ['th'], 'y')))
1721
# strptime() patterns tried in order when parsing dates/timestamps.
# The %dst/%dnd/%drd/%dth variants absorb English ordinal suffixes.
DATE_FORMATS = (
    '%d %B %Y',
    '%d %b %Y',
    '%B %d %Y',
    '%B %dst %Y',
    '%B %dnd %Y',
    '%B %drd %Y',
    '%B %dth %Y',
    '%b %d %Y',
    '%b %dst %Y',
    '%b %dnd %Y',
    '%b %drd %Y',
    '%b %dth %Y',
    '%b %dst %Y %I:%M',
    '%b %dnd %Y %I:%M',
    '%b %drd %Y %I:%M',
    '%b %dth %Y %I:%M',
    '%Y %m %d',
    '%Y-%m-%d',
    '%Y/%m/%d',
    '%Y/%m/%d %H:%M',
    '%Y/%m/%d %H:%M:%S',
    '%Y-%m-%d %H:%M',
    '%Y-%m-%d %H:%M:%S',
    '%Y-%m-%d %H:%M:%S.%f',
    '%d.%m.%Y %H:%M',
    '%d.%m.%Y %H.%M',
    '%Y-%m-%dT%H:%M:%SZ',
    '%Y-%m-%dT%H:%M:%S.%fZ',
    '%Y-%m-%dT%H:%M:%S.%f0Z',
    '%Y-%m-%dT%H:%M:%S',
    '%Y-%m-%dT%H:%M:%S.%f',
    '%Y-%m-%dT%H:%M',
    '%b %d %Y at %H:%M',
    '%b %d %Y at %H:%M:%S',
    '%B %d %Y at %H:%M',
    '%B %d %Y at %H:%M:%S',
)

# Additional patterns for locales writing the day before the month.
DATE_FORMATS_DAY_FIRST = list(DATE_FORMATS)
DATE_FORMATS_DAY_FIRST.extend([
    '%d-%m-%Y',
    '%d.%m.%Y',
    '%d.%m.%y',
    '%d/%m/%Y',
    '%d/%m/%y',
    '%d/%m/%Y %H:%M:%S',
])

# Additional patterns for locales writing the month before the day.
DATE_FORMATS_MONTH_FIRST = list(DATE_FORMATS)
DATE_FORMATS_MONTH_FIRST.extend([
    '%m-%d-%Y',
    '%m.%d.%Y',
    '%m/%d/%Y',
    '%m/%d/%y',
    '%m/%d/%Y %H:%M:%S',
])

# Matches the argument list of P.A.C.K.E.R.-style packed JavaScript.
PACKED_CODES_RE = r"}\('(.+)',(\d+),(\d+),'([^']+)'\.split\('\|'\)"
# Extracts the body of a <script type="application/ld+json"> block.
JSON_LD_RE = r'(?is)<script[^>]+type=(["\']?)application/ld\+json\1[^>]*>(?P<json_ld>.+?)</script>'
1782
1783
def preferredencoding():
    """Return the system's preferred text encoding.

    Based on locale.getpreferredencoding(); falls back to 'UTF-8' whenever
    that call fails or reports an encoding that cannot actually encode text.
    """
    try:
        encoding = locale.getpreferredencoding()
        # Probe the codec: some platforms report unusable encodings.
        'TEST'.encode(encoding)
        return encoding
    except Exception:
        return 'UTF-8'
1797
1798
def write_json_file(obj, fn):
    """ Encode obj as JSON and write it to fn, atomically if possible """

    fn = encodeFilename(fn)
    if sys.version_info < (3, 0) and sys.platform != 'win32':
        encoding = get_filesystem_encoding()
        # os.path.basename returns a bytes object, but NamedTemporaryFile
        # will fail if the filename contains non ascii characters unless we
        # use a unicode object
        path_basename = lambda f: os.path.basename(fn).decode(encoding)
        # the same for os.path.dirname
        path_dirname = lambda f: os.path.dirname(fn).decode(encoding)
    else:
        path_basename = os.path.basename
        path_dirname = os.path.dirname

    # Write to a sibling temp file first so an interrupted write never
    # leaves a half-written fn behind; renamed over fn at the end.
    args = {
        'suffix': '.tmp',
        'prefix': path_basename(fn) + '.',
        'dir': path_dirname(fn),
        'delete': False,
    }

    # In Python 2.x, json.dump expects a bytestream.
    # In Python 3.x, it writes to a character stream
    if sys.version_info < (3, 0):
        args['mode'] = 'wb'
    else:
        args.update({
            'mode': 'w',
            'encoding': 'utf-8',
        })

    tf = tempfile.NamedTemporaryFile(**compat_kwargs(args))

    try:
        with tf:
            json.dump(obj, tf)
        if sys.platform == 'win32':
            # Need to remove existing file on Windows, else os.rename raises
            # WindowsError or FileExistsError.
            try:
                os.unlink(fn)
            except OSError:
                pass
        try:
            # NamedTemporaryFile creates the file 0600; give the final file
            # the default umask-adjusted 0666 permissions instead.
            mask = os.umask(0)
            os.umask(mask)
            os.chmod(tf.name, 0o666 & ~mask)
        except OSError:
            pass
        os.rename(tf.name, fn)
    except Exception:
        # Best-effort cleanup of the temp file before re-raising
        try:
            os.remove(tf.name)
        except OSError:
            pass
        raise
1857
1858
if sys.version_info >= (2, 7):
    def find_xpath_attr(node, xpath, key, val=None):
        """ Find the xpath xpath[@key=val] """
        # Only plain attribute names are allowed: key is interpolated into
        # the XPath expression below, so anything fancier would be unsafe.
        assert re.match(r'^[a-zA-Z_-]+$', key)
        expr = xpath + ('[@%s]' % key if val is None else "[@%s='%s']" % (key, val))
        return node.find(expr)
else:
    def find_xpath_attr(node, xpath, key, val=None):
        # Python < 2.7: emulate the [@key=val] attribute predicate manually
        # by filtering all candidates of the base xpath.
        for f in node.findall(compat_xpath(xpath)):
            if key not in f.attrib:
                continue
            if val is None or f.attrib.get(key) == val:
                return f
        return None
1873
1874 # On python2.6 the xml.etree.ElementTree.Element methods don't support
1875 # the namespace parameter
1876
1877
def xpath_with_ns(path, ns_map):
    """Expand 'prefix:tag' steps of an XPath into '{uri}tag' form.

    Each '/'-separated component may carry one 'prefix:' namespace prefix,
    resolved through ns_map (KeyError for unknown prefixes).
    """
    def expand(component):
        parts = component.split(':')
        if len(parts) == 1:
            return parts[0]
        prefix, tag = parts
        return '{%s}%s' % (ns_map[prefix], tag)

    return '/'.join(expand(component) for component in path.split('/'))
1888
1889
def xpath_element(node, xpath, name=None, fatal=False, default=NO_DEFAULT):
    """Find the first element matching one (or the first of several) XPaths.

    xpath may be a single expression or an iterable of expressions tried in
    order.  When nothing matches: return `default` if one was supplied,
    raise ExtractorError if `fatal`, otherwise return None.
    """
    if isinstance(xpath, (str, compat_str)):
        result = node.find(compat_xpath(xpath))
    else:
        for candidate in xpath:
            result = node.find(compat_xpath(candidate))
            if result is not None:
                break

    if result is not None:
        return result
    if default is not NO_DEFAULT:
        return default
    if fatal:
        raise ExtractorError('Could not find XML element %s'
                             % (xpath if name is None else name))
    return None
1911
1912
def xpath_text(node, xpath, name=None, fatal=False, default=NO_DEFAULT):
    """Like xpath_element(), but return the matched element's text content."""
    elem = xpath_element(node, xpath, name, fatal=fatal, default=default)
    if elem is None or elem == default:
        return elem
    if elem.text is not None:
        return elem.text
    # Element exists but has no text
    if default is not NO_DEFAULT:
        return default
    if fatal:
        raise ExtractorError('Could not find XML element\'s text %s'
                             % (xpath if name is None else name))
    return None
1926
1927
def xpath_attr(node, xpath, key, name=None, fatal=False, default=NO_DEFAULT):
    """Return attribute `key` of the element matching xpath[@key], with the
    same default/fatal semantics as xpath_element()."""
    elem = find_xpath_attr(node, xpath, key)
    if elem is not None:
        return elem.attrib[key]
    if default is not NO_DEFAULT:
        return default
    if fatal:
        raise ExtractorError('Could not find XML attribute %s'
                             % ('%s[@%s]' % (xpath, key) if name is None else name))
    return None
1939
1940
def get_element_by_id(id, html):
    """Return the content of the tag with the specified ID in the passed HTML document"""
    # NOTE: the parameter name shadows the builtin id(); kept as-is since it
    # is part of the public keyword interface.
    return get_element_by_attribute('id', id, html)
1944
1945
def get_element_by_class(class_name, html):
    """Return the content of the first tag with the specified class in the passed HTML document"""
    matches = get_elements_by_class(class_name, html)
    if not matches:
        return None
    return matches[0]
1950
1951
def get_element_by_attribute(attribute, value, html, escape_value=True):
    """Return the content of the first tag carrying attribute=value, or None."""
    matches = get_elements_by_attribute(attribute, value, html, escape_value)
    if not matches:
        return None
    return matches[0]
1955
1956
def get_elements_by_class(class_name, html):
    """Return the content of all tags with the specified class in the passed HTML document as a list"""
    # Match the class name as a whole word anywhere within the attribute value.
    class_pattern = r'[^\'"]*\b%s\b[^\'"]*' % re.escape(class_name)
    return get_elements_by_attribute('class', class_pattern, html, escape_value=False)
1962
1963
def get_elements_by_attribute(attribute, value, html, escape_value=True):
    """Return the content of the tag with the specified attribute in the passed HTML document"""

    if escape_value:
        value = re.escape(value)

    results = []
    for match in re.finditer(r'''(?xs)
        <([a-zA-Z0-9:._-]+)
         (?:\s+[a-zA-Z0-9:._-]+(?:=[a-zA-Z0-9:._-]*|="[^"]*"|='[^']*'|))*?
         \s+%s=['"]?%s['"]?
         (?:\s+[a-zA-Z0-9:._-]+(?:=[a-zA-Z0-9:._-]*|="[^"]*"|='[^']*'|))*?
        \s*>
        (?P<content>.*?)
        </\1>
    ''' % (re.escape(attribute), value), html):
        content = match.group('content')

        # Strip one layer of surrounding quotes if present
        if content.startswith('"') or content.startswith("'"):
            content = content[1:-1]

        results.append(unescapeHTML(content))

    return results
1987
1988
class HTMLAttributeParser(compat_HTMLParser):
    """Trivial HTML parser to gather the attributes for a single element"""

    def __init__(self):
        # Filled by handle_starttag(); stays empty if no start tag is seen.
        self.attrs = {}
        # Explicit base-class call rather than super(): NOTE(review) this is
        # presumably for Python 2's old-style HTMLParser — keep as-is.
        compat_HTMLParser.__init__(self)

    def handle_starttag(self, tag, attrs):
        # Keep only the most recently seen tag's attributes; callers feed a
        # single element, so that is the element of interest.
        self.attrs = dict(attrs)
1998
1999
def extract_attributes(html_element):
    """Given a string for an HTML element such as
    <el
         a="foo" B="bar" c="&98;az" d=boz
         empty= noval entity="&amp;"
         sq='"' dq="'"
    >
    Decode and return a dictionary of attributes.
    {
        'a': 'foo', 'b': 'bar', c: 'baz', d: 'boz',
        'empty': '', 'noval': None, 'entity': '&',
        'sq': '"', 'dq': '\''
    }.
    NB HTMLParser is stricter in Python 2.6 & 3.2 than in later versions,
    but the cases in the unit test will work for all of 2.6, 2.7, 3.2-3.5.
    """
    attribute_parser = HTMLAttributeParser()
    try:
        attribute_parser.feed(html_element)
        attribute_parser.close()
    except compat_HTMLParseError:
        # Older Python may throw HTMLParseError in case of malformed HTML;
        # whatever was gathered up to that point is still returned.
        pass
    return attribute_parser.attrs
2024
2025
def clean_html(html):
    """Clean an HTML snippet into a readable string"""

    # Convenience for sanitizing descriptions etc.
    if html is None:
        return html

    # Collapse literal newlines, then turn <br> tags and paragraph breaks
    # back into newlines
    html = html.replace('\n', ' ')
    html = re.sub(r'(?u)\s*<\s*br\s*/?\s*>\s*', '\n', html)
    html = re.sub(r'(?u)<\s*/\s*p\s*>\s*<\s*p[^>]*>', '\n', html)
    # Drop all remaining tags, then decode HTML entities
    html = re.sub('<.*?>', '', html)
    return unescapeHTML(html).strip()
2041
2042
def sanitize_open(filename, open_mode):
    """Try to open the given filename, and slightly tweak it if this fails.

    Attempts to open the given filename. If this fails, it tries to change
    the filename slightly, step by step, until it's either able to open it
    or it fails and raises a final exception, like the standard open()
    function.

    It returns the tuple (stream, definitive_file_name).
    """
    try:
        if filename == '-':
            # '-' means write to standard output
            if sys.platform == 'win32':
                import msvcrt
                # Switch stdout to binary mode so media data is not mangled
                msvcrt.setmode(sys.stdout.fileno(), os.O_BINARY)
            return (sys.stdout.buffer if hasattr(sys.stdout, 'buffer') else sys.stdout, filename)
        stream = open(encodeFilename(filename), open_mode)
        return (stream, filename)
    except (IOError, OSError) as err:
        if err.errno in (errno.EACCES,):
            # A permission error will not be fixed by renaming; give up
            raise

        # In case of error, try to remove win32 forbidden chars
        alt_filename = sanitize_path(filename)
        if alt_filename == filename:
            raise
        else:
            # An exception here should be caught in the caller
            stream = open(encodeFilename(alt_filename), open_mode)
            return (stream, alt_filename)
2073
2074
def timeconvert(timestr):
    """Convert RFC 2822 defined time string into system timestamp"""
    parsed = email.utils.parsedate_tz(timestr)
    if parsed is None:
        # Not a parseable RFC 2822 date
        return None
    return email.utils.mktime_tz(parsed)
2082
2083
def sanitize_filename(s, restricted=False, is_id=False):
    """Sanitizes a string so it could be used as part of a filename.
    If restricted is set, use a stricter subset of allowed characters.
    Set is_id if this is not an arbitrary string, but an ID that should be kept
    if possible.
    """
    def replace_insane(char):
        # Transliterate accented characters in restricted mode
        if restricted and char in ACCENT_CHARS:
            return ACCENT_CHARS[char]
        code = ord(char)
        if char == '?' or code < 32 or code == 127:
            return ''
        if char == '"':
            return '' if restricted else '\''
        if char == ':':
            return '_-' if restricted else ' -'
        if char in '\\/|*<>':
            return '_'
        if restricted and (char in '!&\'()[]{}$;`^,#' or char.isspace()):
            return '_'
        if restricted and code > 127:
            return '_'
        return char

    # Keep timestamps like 12:34 readable by mapping their ':' to '_' before
    # the per-character replacement turns ':' into ' -'/'_-'
    s = re.sub(r'[0-9]+(?::[0-9]+)+', lambda m: m.group(0).replace(':', '_'), s)
    result = ''.join(replace_insane(char) for char in s)
    if not is_id:
        while '__' in result:
            result = result.replace('__', '_')
        result = result.strip('_')
        # Common case of "Foreign band name - English song title"
        if restricted and result.startswith('-_'):
            result = result[2:]
        if result.startswith('-'):
            result = '_' + result[len('-'):]
        result = result.lstrip('.')
        if not result:
            result = '_'
    return result
2123
2124
def sanitize_path(s):
    """Sanitizes and normalizes path on Windows"""
    # No-op on every other platform
    if sys.platform != 'win32':
        return s
    drive_or_unc, _ = os.path.splitdrive(s)
    if sys.version_info < (2, 7) and not drive_or_unc:
        # Python < 2.7: fall back to splitunc() for UNC (\\server\share) paths
        drive_or_unc, _ = os.path.splitunc(s)
    norm_path = os.path.normpath(remove_start(s, drive_or_unc)).split(os.path.sep)
    if drive_or_unc:
        norm_path.pop(0)
    # Replace characters forbidden in Windows path components, as well as a
    # trailing dot/space, with '#'; keep '.'/'..' components intact
    sanitized_path = [
        path_part if path_part in ['.', '..'] else re.sub(r'(?:[/<>:"\|\\?\*]|[\s.]$)', '#', path_part)
        for path_part in norm_path]
    if drive_or_unc:
        sanitized_path.insert(0, drive_or_unc + os.path.sep)
    return os.path.join(*sanitized_path)
2141
2142
def sanitize_url(url):
    """Repair URLs broken in ways commonly seen in the wild.

    Protocol-relative URLs ('//host/...') get an 'http:' scheme prepended
    and a couple of observed scheme typos are corrected; any other URL is
    returned untouched.
    """
    # Prepend protocol-less URLs with `http:` scheme in order to mitigate
    # the number of unwanted failures due to missing protocol
    if url.startswith('//'):
        return 'http:%s' % url
    # Fix some common typos seen so far:
    # - httpss:// (https://github.com/ytdl-org/youtube-dl/issues/15649)
    # - rmtp(e|s):// (https://bx1.be/lives/direct-tv/)
    for typo_re, replacement in ((r'^httpss://', r'https://'),
                                 (r'^rmtp([es]?)://', r'rtmp\1://')):
        if re.match(typo_re, url):
            return re.sub(typo_re, replacement, url)
    return url
2159
2160
def sanitized_Request(url, *args, **kwargs):
    # Like compat_urllib_request.Request, but passes the URL through
    # sanitize_url() first
    return compat_urllib_request.Request(sanitize_url(url), *args, **kwargs)
2163
2164
def expand_path(s):
    """Expand shell variables and ~"""
    # NOTE(review): compat_expanduser is presumably a portability shim over
    # os.path.expanduser — see compat.py for the exact behavior.
    return os.path.expandvars(compat_expanduser(s))
2168
2169
def orderedSet(iterable):
    """ Remove all duplicates from the input iterable """
    # Membership is checked against the output list itself so that
    # unhashable elements are supported; first occurrence wins.
    deduped = []
    for item in iterable:
        if item in deduped:
            continue
        deduped.append(item)
    return deduped
2177
2178
def _htmlentity_transform(entity_with_semicolon):
    """Transforms an HTML entity to a character."""
    name = entity_with_semicolon[:-1]

    # Known non-numeric HTML entity
    codepoint = compat_html_entities.name2codepoint.get(name)
    if codepoint is not None:
        return compat_chr(codepoint)

    # TODO: HTML5 allows entities without a semicolon. For example,
    # '&Eacuteric' should be decoded as 'Éric'.
    if entity_with_semicolon in compat_html_entities_html5:
        return compat_html_entities_html5[entity_with_semicolon]

    # Numeric character reference: decimal (&#65;) or hexadecimal (&#x41;)
    numeric = re.match(r'#(x[0-9a-fA-F]+|[0-9]+)', name)
    if numeric:
        digits = numeric.group(1)
        if digits.startswith('x'):
            base = 16
            digits = '0%s' % digits
        else:
            base = 10
        # See https://github.com/ytdl-org/youtube-dl/issues/7518
        try:
            return compat_chr(int(digits, base))
        except ValueError:
            pass

    # Unknown entity in name, return its literal representation
    return '&%s;' % name
2208
2209
def unescapeHTML(s):
    """Decode all HTML entities in s; a None input is passed through."""
    if s is None:
        return None
    assert type(s) == compat_str

    def _replace(match):
        return _htmlentity_transform(match.group(1))

    return re.sub(r'&([^&;]+;)', _replace, s)
2217
2218
def process_communicate_or_kill(p, *args, **kwargs):
    """Run p.communicate(), killing and reaping the child on any failure.

    Guarantees the subprocess does not outlive an exception (including
    KeyboardInterrupt) raised while waiting; the exception is re-raised
    after cleanup.
    """
    try:
        return p.communicate(*args, **kwargs)
    except BaseException:
        # Deliberately catches absolutely everything, KeyboardInterrupt
        # included, so the child is always terminated and waited on.
        p.kill()
        p.wait()
        raise
2226
2227
def get_subprocess_encoding():
    """Return the encoding used for data exchanged with subprocesses."""
    if sys.platform == 'win32' and sys.getwindowsversion()[0] >= 5:
        # For subprocess calls, encode with locale encoding
        # Refer to http://stackoverflow.com/a/9951851/35070
        return preferredencoding()
    # Elsewhere use the filesystem encoding, defaulting to UTF-8
    encoding = sys.getfilesystemencoding()
    return 'utf-8' if encoding is None else encoding
2238
2239
def encodeFilename(s, for_subprocess=False):
    """
    @param s The name of the file
    """
    # Returns s unchanged wherever the platform accepts text filenames;
    # only Python 2 on non-Windows, non-Jython platforms needs bytes.

    assert type(s) == compat_str

    # Python 3 has a Unicode API
    if sys.version_info >= (3, 0):
        return s

    # Pass '' directly to use Unicode APIs on Windows 2000 and up
    # (Detecting Windows NT 4 is tricky because 'major >= 4' would
    # match Windows 9x series as well. Besides, NT 4 is obsolete.)
    if not for_subprocess and sys.platform == 'win32' and sys.getwindowsversion()[0] >= 5:
        return s

    # Jython assumes filenames are Unicode strings though reported as Python 2.x compatible
    if sys.platform.startswith('java'):
        return s

    return s.encode(get_subprocess_encoding(), 'ignore')
2262
2263
def decodeFilename(b, for_subprocess=False):
    """Decode a byte filename to text on Python 2; pass through otherwise."""
    # Python 3 values and anything that is not bytes need no decoding
    if sys.version_info >= (3, 0) or not isinstance(b, bytes):
        return b
    return b.decode(get_subprocess_encoding(), 'ignore')
2273
2274
def encodeArgument(s):
    """Encode a command-line argument using the subprocess encoding."""
    if not isinstance(s, compat_str):
        # Legacy code that uses byte strings
        # Uncomment the following line after fixing all post processors
        # assert False, 'Internal error: %r should be of type %r, is %r' % (s, compat_str, type(s))
        s = s.decode('ascii')
    return encodeFilename(s, True)
2282
2283
def decodeArgument(b):
    # Command-line arguments use the subprocess encoding, like filenames
    return decodeFilename(b, True)
2286
2287
def decodeOption(optval):
    """Decode a (possibly byte-string) option value to text; None passes through."""
    if optval is None:
        return optval
    if isinstance(optval, bytes):
        optval = optval.decode(preferredencoding())

    assert isinstance(optval, compat_str)
    return optval
2296
2297
def formatSeconds(secs, delim=':'):
    """Format a duration in seconds as [H<delim>]MM<delim>SS-style text.

    secs: the duration in seconds (formatted as an integer).
    delim: separator between the components (default ':').

    Returns e.g. '1:01:05' for 3665, '1:05' for 65, '5' for 5.
    """
    # '>=' (not '>') so boundary values render correctly: exactly one hour
    # is '1:00:00' rather than '60:00', exactly one minute '1:00' not '60'.
    if secs >= 3600:
        return '%d%s%02d%s%02d' % (secs // 3600, delim, (secs % 3600) // 60, delim, secs % 60)
    elif secs >= 60:
        return '%d%s%02d' % (secs // 60, delim, secs % 60)
    else:
        return '%d' % secs
2305
2306
def make_HTTPS_handler(params, **kwargs):
    """Build a YoutubeDLHTTPSHandler honoring params['nocheckcertificate'],
    across the differing SSL APIs of the supported Python versions."""
    opts_no_check_certificate = params.get('nocheckcertificate', False)
    if hasattr(ssl, 'create_default_context'):  # Python >= 3.4 or 2.7.9
        context = ssl.create_default_context(ssl.Purpose.SERVER_AUTH)
        if opts_no_check_certificate:
            # Disable both hostname and certificate verification
            context.check_hostname = False
            context.verify_mode = ssl.CERT_NONE
        try:
            return YoutubeDLHTTPSHandler(params, context=context, **kwargs)
        except TypeError:
            # Python 2.7.8
            # (create_default_context present but HTTPSHandler has no context=)
            pass

    if sys.version_info < (3, 2):
        return YoutubeDLHTTPSHandler(params, **kwargs)
    else:  # Python < 3.4
        context = ssl.SSLContext(ssl.PROTOCOL_TLSv1)
        context.verify_mode = (ssl.CERT_NONE
                               if opts_no_check_certificate
                               else ssl.CERT_REQUIRED)
        context.set_default_verify_paths()
        return YoutubeDLHTTPSHandler(params, context=context, **kwargs)
2330
2331
def bug_reports_message():
    """Build the standard "please report this issue" suffix for error messages."""
    if ytdl_is_updateable():
        update_cmd = 'type youtube-dlc -U to update'
    else:
        update_cmd = 'see https://github.com/pukkandan/yt-dlp on how to update'
    template = ('; please report this issue on https://github.com/pukkandan/yt-dlp .'
                ' Make sure you are using the latest version; %s.'
                ' Be sure to call youtube-dlc with the --verbose flag and include its complete output.')
    return template % update_cmd
2341
2342
class YoutubeDLError(Exception):
    """Base exception for YoutubeDL errors.

    All exceptions defined in this module derive from this class, so callers
    can catch everything with a single `except YoutubeDLError`.
    """
    pass
2346
2347
class ExtractorError(YoutubeDLError):
    """Error during info extraction."""

    def __init__(self, msg, tb=None, expected=False, cause=None, video_id=None):
        """ tb, if given, is the original traceback (so that it can be printed out).
        If expected is set, this is a normal error message and most likely not a bug in youtube-dlc.
        """

        # Network/timeout failures are always "expected": not worth a bug report
        if sys.exc_info()[0] in (compat_urllib_error.URLError, socket.timeout, UnavailableVideoError):
            expected = True
        if video_id is not None:
            msg = video_id + ': ' + msg
        if cause:
            msg += ' (caused by %r)' % cause
        if not expected:
            # Unexpected errors get the bug-report boilerplate appended
            msg += bug_reports_message()
        super(ExtractorError, self).__init__(msg)

        self.traceback = tb
        self.exc_info = sys.exc_info()  # preserve original exception
        self.cause = cause
        self.video_id = video_id

    def format_traceback(self):
        # Returns the formatted traceback as one string, or None if no
        # traceback was supplied at construction time
        if self.traceback is None:
            return None
        return ''.join(traceback.format_tb(self.traceback))
2375
2376
class UnsupportedError(ExtractorError):
    """Raised for URLs that no extractor supports (always 'expected')."""
    def __init__(self, url):
        super(UnsupportedError, self).__init__(
            'Unsupported URL: %s' % url, expected=True)
        # Keep the offending URL accessible to callers
        self.url = url
2382
2383
class RegexNotFoundError(ExtractorError):
    """Error when a regex didn't match"""
    # Carries no extra state; the message names the missing pattern.
    pass
2387
2388
class GeoRestrictedError(ExtractorError):
    """Geographic restriction Error exception.

    This exception may be thrown when a video is not available from your
    geographic location due to geographic restrictions imposed by a website.
    """

    def __init__(self, msg, countries=None):
        # countries: optional list of countries from which the video is
        # available (NOTE(review): presumably country codes — confirm with callers)
        super(GeoRestrictedError, self).__init__(msg, expected=True)
        self.msg = msg
        self.countries = countries
2400
2401
class DownloadError(YoutubeDLError):
    """Download Error exception.

    This exception may be thrown by FileDownloader objects if they are not
    configured to continue on errors. They will contain the appropriate
    error message.
    """

    def __init__(self, msg, exc_info=None):
        """ exc_info, if given, is the original exception that caused the trouble (as returned by sys.exc_info()). """
        super(DownloadError, self).__init__(msg)
        # (type, value, traceback) triple or None
        self.exc_info = exc_info
2414
2415
class SameFileError(YoutubeDLError):
    """Same File exception.

    This exception will be thrown by FileDownloader objects if they detect
    multiple files would have to be downloaded to the same file on disk.
    """
    pass  # marker exception; carries no extra state
2423
2424
class PostProcessingError(YoutubeDLError):
    """Post Processing exception.

    This exception may be raised by PostProcessor's .run() method to
    indicate an error in the postprocessing task.
    """

    def __init__(self, msg):
        super(PostProcessingError, self).__init__(msg)
        # Keep the raw message accessible (also present in Exception args)
        self.msg = msg
2435
2436
class ExistingVideoReached(YoutubeDLError):
    """Raised to stop processing when an already-downloaded video is reached.

    NOTE(review): the previous docstring ("--max-downloads limit has been
    reached") looked copy-pasted from MaxDownloadsReached; the class name
    indicates the break-on-existing case — confirm against option handling.
    """
    pass
2440
2441
class RejectedVideoReached(YoutubeDLError):
    """Raised to stop processing when a rejected (filtered-out) video is reached.

    NOTE(review): the previous docstring ("--max-downloads limit has been
    reached") looked copy-pasted from MaxDownloadsReached; the class name
    indicates the break-on-reject case — confirm against option handling.
    """
    pass
2445
2446
class MaxDownloadsReached(YoutubeDLError):
    """ --max-downloads limit has been reached. """
    pass  # marker exception; carries no extra state
2450
2451
class UnavailableVideoError(YoutubeDLError):
    """Unavailable Format exception.

    This exception will be thrown when a video is requested
    in a format that is not available for that video.
    """
    pass  # marker exception; also treated as "expected" by ExtractorError
2459
2460
class ContentTooShortError(YoutubeDLError):
    """Content Too Short exception.

    This exception may be raised by FileDownloader objects when a file they
    download is too small for what the server announced first, indicating
    the connection was probably interrupted.
    """

    def __init__(self, downloaded, expected):
        # downloaded/expected: bytes actually received vs. bytes announced
        super(ContentTooShortError, self).__init__(
            'Downloaded {0} bytes, expected {1} bytes'.format(downloaded, expected)
        )
        # Both in bytes
        self.downloaded = downloaded
        self.expected = expected
2476
2477
class XAttrMetadataError(YoutubeDLError):
    """Raised when writing xattr metadata fails; classifies the cause in .reason."""

    def __init__(self, code=None, msg='Unknown error'):
        super(XAttrMetadataError, self).__init__(msg)
        self.code = code
        self.msg = msg

        # Parsing code and msg
        # .reason is one of 'NO_SPACE', 'VALUE_TOO_LONG', 'NOT_SUPPORTED'.
        # NOTE(review): errno.EDQUOT is not defined on every platform — confirm
        # this class is only instantiated where it exists.
        if (self.code in (errno.ENOSPC, errno.EDQUOT)
                or 'No space left' in self.msg or 'Disk quota exceeded' in self.msg):
            self.reason = 'NO_SPACE'
        elif self.code == errno.E2BIG or 'Argument list too long' in self.msg:
            self.reason = 'VALUE_TOO_LONG'
        else:
            self.reason = 'NOT_SUPPORTED'
2492
2493
class XAttrUnavailableError(YoutubeDLError):
    """Raised when extended attributes cannot be used on this system/installation."""
    pass
2496
2497
def _create_http_connection(ydl_handler, http_class, is_https, *args, **kwargs):
    """Instantiate http_class, optionally binding outgoing connections to the
    configured source address (ydl_handler._params['source_address'])."""
    # Working around python 2 bug (see http://bugs.python.org/issue17849) by limiting
    # expected HTTP responses to meet HTTP/1.0 or later (see also
    # https://github.com/ytdl-org/youtube-dl/issues/6727)
    if sys.version_info < (3, 0):
        kwargs['strict'] = True
    hc = http_class(*args, **compat_kwargs(kwargs))
    source_address = ydl_handler._params.get('source_address')

    if source_address is not None:
        # This is to workaround _create_connection() from socket where it will try all
        # address data from getaddrinfo() including IPv6. This filters the result from
        # getaddrinfo() based on the source_address value.
        # This is based on the cpython socket.create_connection() function.
        # https://github.com/python/cpython/blob/master/Lib/socket.py#L691
        def _create_connection(address, timeout=socket._GLOBAL_DEFAULT_TIMEOUT, source_address=None):
            host, port = address
            err = None
            addrs = socket.getaddrinfo(host, port, 0, socket.SOCK_STREAM)
            # Pick the address family matching the source address ('.' in the
            # source address means IPv4, otherwise IPv6)
            af = socket.AF_INET if '.' in source_address[0] else socket.AF_INET6
            ip_addrs = [addr for addr in addrs if addr[0] == af]
            if addrs and not ip_addrs:
                ip_version = 'v4' if af == socket.AF_INET else 'v6'
                raise socket.error(
                    "No remote IP%s addresses available for connect, can't use '%s' as source address"
                    % (ip_version, source_address[0]))
            # Try each candidate address until one connects
            for res in ip_addrs:
                af, socktype, proto, canonname, sa = res
                sock = None
                try:
                    sock = socket.socket(af, socktype, proto)
                    if timeout is not socket._GLOBAL_DEFAULT_TIMEOUT:
                        sock.settimeout(timeout)
                    sock.bind(source_address)
                    sock.connect(sa)
                    err = None  # Explicitly break reference cycle
                    return sock
                except socket.error as _:
                    err = _
                    if sock is not None:
                        sock.close()
            if err is not None:
                raise err
            else:
                raise socket.error('getaddrinfo returns an empty list')
        if hasattr(hc, '_create_connection'):
            hc._create_connection = _create_connection
        sa = (source_address, 0)
        if hasattr(hc, 'source_address'):  # Python 2.7+
            hc.source_address = sa
        else:  # Python 2.6
            def _hc_connect(self, *args, **kwargs):
                sock = _create_connection(
                    (self.host, self.port), self.timeout, sa)
                if is_https:
                    self.sock = ssl.wrap_socket(
                        sock, self.key_file, self.cert_file,
                        ssl_version=ssl.PROTOCOL_TLSv1)
                else:
                    self.sock = sock
            hc.connect = functools.partial(_hc_connect, hc)

    return hc
2561
2562
def handle_youtubedl_headers(headers):
    """Strip internal youtube-dl control headers before the real request.

    The pseudo-header 'Youtubedl-no-compression' never goes on the wire:
    when present, a copy of the mapping is returned with it and any
    'Accept-Encoding' header (case-insensitive) removed, so the server
    sends an uncompressed response.  Without it, the very same mapping
    object is returned unchanged.
    """
    if 'Youtubedl-no-compression' not in headers:
        return headers
    filtered = dict((key, value) for key, value in headers.items()
                    if key.lower() != 'accept-encoding')
    del filtered['Youtubedl-no-compression']
    return filtered
2571
2572
2573 class YoutubeDLHandler(compat_urllib_request.HTTPHandler):
2574 """Handler for HTTP requests and responses.
2575
2576 This class, when installed with an OpenerDirector, automatically adds
2577 the standard headers to every HTTP request and handles gzipped and
2578 deflated responses from web servers. If compression is to be avoided in
2579 a particular request, the original request in the program code only has
2580 to include the HTTP header "Youtubedl-no-compression", which will be
2581 removed before making the real request.
2582
2583 Part of this code was copied from:
2584
2585 http://techknack.net/python-urllib2-handlers/
2586
2587 Andrew Rowls, the author of that code, agreed to release it to the
2588 public domain.
2589 """
2590
    def __init__(self, params, *args, **kwargs):
        # params: the YoutubeDL options dict; kept for the connection
        # factory (e.g. source_address in _create_http_connection)
        compat_urllib_request.HTTPHandler.__init__(self, *args, **kwargs)
        self._params = params
2594
    def http_open(self, req):
        # Open the request with a plain HTTPConnection, optionally routed
        # through a SOCKS proxy carried in the internal Ytdl-socks-proxy
        # pseudo-header (removed so it is never sent on the wire)
        conn_class = compat_http_client.HTTPConnection

        socks_proxy = req.headers.get('Ytdl-socks-proxy')
        if socks_proxy:
            conn_class = make_socks_conn_class(conn_class, socks_proxy)
            del req.headers['Ytdl-socks-proxy']

        return self.do_open(functools.partial(
            _create_http_connection, self, conn_class, False),
            req)
2606
    @staticmethod
    def deflate(data):
        # Servers send either a raw deflate stream or a zlib-wrapped one;
        # try raw first (negative wbits = no zlib header), then fall back.
        try:
            return zlib.decompress(data, -zlib.MAX_WBITS)
        except zlib.error:
            return zlib.decompress(data)
2613
    def http_request(self, req):
        # Normalize the outgoing request: percent-encode non-ASCII URLs, add
        # the default headers and strip internal pseudo-headers.
        #
        # According to RFC 3986, URLs can not contain non-ASCII characters, however this is not
        # always respected by websites, some tend to give out URLs with non percent-encoded
        # non-ASCII characters (see telemb.py, ard.py [#3412])
        # urllib chokes on URLs with non-ASCII characters (see http://bugs.python.org/issue3991)
        # To work around aforementioned issue we will replace request's original URL with
        # percent-encoded one
        # Since redirects are also affected (e.g. http://www.southpark.de/alle-episoden/s18e09)
        # the code of this workaround has been moved here from YoutubeDL.urlopen()
        url = req.get_full_url()
        url_escaped = escape_url(url)

        # Substitute URL if any change after escaping
        if url != url_escaped:
            req = update_Request(req, url=url_escaped)

        for h, v in std_headers.items():
            # Capitalize is needed because of Python bug 2275: http://bugs.python.org/issue2275
            # The dict keys are capitalized because of this bug by urllib
            if h.capitalize() not in req.headers:
                req.add_header(h, v)

        req.headers = handle_youtubedl_headers(req.headers)

        if sys.version_info < (2, 7) and '#' in req.get_full_url():
            # Python 2.6 is brain-dead when it comes to fragments
            req._Request__original = req._Request__original.partition('#')[0]
            req._Request__r_type = req._Request__r_type.partition('#')[0]

        return req
2644
2645 def http_response(self, req, resp):
2646 old_resp = resp
2647 # gzip
2648 if resp.headers.get('Content-encoding', '') == 'gzip':
2649 content = resp.read()
2650 gz = gzip.GzipFile(fileobj=io.BytesIO(content), mode='rb')
2651 try:
2652 uncompressed = io.BytesIO(gz.read())
2653 except IOError as original_ioerror:
2654 # There may be junk add the end of the file
2655 # See http://stackoverflow.com/q/4928560/35070 for details
2656 for i in range(1, 1024):
2657 try:
2658 gz = gzip.GzipFile(fileobj=io.BytesIO(content[:-i]), mode='rb')
2659 uncompressed = io.BytesIO(gz.read())
2660 except IOError:
2661 continue
2662 break
2663 else:
2664 raise original_ioerror
2665 resp = compat_urllib_request.addinfourl(uncompressed, old_resp.headers, old_resp.url, old_resp.code)
2666 resp.msg = old_resp.msg
2667 del resp.headers['Content-encoding']
2668 # deflate
2669 if resp.headers.get('Content-encoding', '') == 'deflate':
2670 gz = io.BytesIO(self.deflate(resp.read()))
2671 resp = compat_urllib_request.addinfourl(gz, old_resp.headers, old_resp.url, old_resp.code)
2672 resp.msg = old_resp.msg
2673 del resp.headers['Content-encoding']
2674 # Percent-encode redirect URL of Location HTTP header to satisfy RFC 3986 (see
2675 # https://github.com/ytdl-org/youtube-dl/issues/6457).
2676 if 300 <= resp.code < 400:
2677 location = resp.headers.get('Location')
2678 if location:
2679 # As of RFC 2616 default charset is iso-8859-1 that is respected by python 3
2680 if sys.version_info >= (3, 0):
2681 location = location.encode('iso-8859-1').decode('utf-8')
2682 else:
2683 location = location.decode('utf-8')
2684 location_escaped = escape_url(location)
2685 if location != location_escaped:
2686 del resp.headers['Location']
2687 if sys.version_info < (3, 0):
2688 location_escaped = location_escaped.encode('utf-8')
2689 resp.headers['Location'] = location_escaped
2690 return resp
2691
2692 https_request = http_request
2693 https_response = http_response
2694
2695
def make_socks_conn_class(base_class, socks_proxy):
    """Return a subclass of *base_class* whose connect() tunnels through
    the SOCKS proxy described by the *socks_proxy* URL.

    base_class must be HTTPConnection or HTTPSConnection; the returned class
    keeps the same interface and wraps the socket in TLS for HTTPS.
    Raises ValueError for an unsupported proxy scheme.
    """
    assert issubclass(base_class, (
        compat_http_client.HTTPConnection, compat_http_client.HTTPSConnection))

    url_components = compat_urlparse.urlparse(socks_proxy)
    scheme = url_components.scheme.lower()
    if scheme == 'socks5':
        socks_type = ProxyType.SOCKS5
    elif scheme in ('socks', 'socks4'):
        socks_type = ProxyType.SOCKS4
    elif scheme == 'socks4a':
        socks_type = ProxyType.SOCKS4A
    else:
        # Previously an unknown scheme crashed later with UnboundLocalError;
        # fail fast with a meaningful error instead.
        raise ValueError('Unsupported SOCKS proxy scheme: %s' % url_components.scheme)

    def unquote_if_non_empty(s):
        # Credentials are percent-encoded in the proxy URL; empty/None pass through
        if not s:
            return s
        return compat_urllib_parse_unquote_plus(s)

    proxy_args = (
        socks_type,
        url_components.hostname, url_components.port or 1080,
        True,  # Remote DNS
        unquote_if_non_empty(url_components.username),
        unquote_if_non_empty(url_components.password),
    )

    class SocksConnection(base_class):
        def connect(self):
            self.sock = sockssocket()
            self.sock.setproxy(*proxy_args)
            if type(self.timeout) in (int, float):
                self.sock.settimeout(self.timeout)
            self.sock.connect((self.host, self.port))

            if isinstance(self, compat_http_client.HTTPSConnection):
                if hasattr(self, '_context'):  # Python > 2.6
                    self.sock = self._context.wrap_socket(
                        self.sock, server_hostname=self.host)
                else:
                    self.sock = ssl.wrap_socket(self.sock)

    return SocksConnection
2737
2738
class YoutubeDLHTTPSHandler(compat_urllib_request.HTTPSHandler):
    """HTTPS handler supporting a custom connection class and SOCKS proxies."""

    def __init__(self, params, https_conn_class=None, *args, **kwargs):
        compat_urllib_request.HTTPSHandler.__init__(self, *args, **kwargs)
        self._https_conn_class = https_conn_class or compat_http_client.HTTPSConnection
        self._params = params

    def https_open(self, req):
        conn_class = self._https_conn_class

        # Forward TLS context/hostname checking when the base handler has them
        extra_kwargs = {}
        if hasattr(self, '_context'):  # python > 2.6
            extra_kwargs['context'] = self._context
        if hasattr(self, '_check_hostname'):  # python 3.x
            extra_kwargs['check_hostname'] = self._check_hostname

        # Internal pseudo-header selecting a SOCKS tunnel; never sent out
        socks_proxy = req.headers.get('Ytdl-socks-proxy')
        if socks_proxy:
            conn_class = make_socks_conn_class(conn_class, socks_proxy)
            del req.headers['Ytdl-socks-proxy']

        connection_factory = functools.partial(
            _create_http_connection, self, conn_class, True)
        return self.do_open(connection_factory, req, **extra_kwargs)
2762
2763
class YoutubeDLCookieJar(compat_cookiejar.MozillaCookieJar):
    """
    See [1] for cookie file format.

    1. https://curl.haxx.se/docs/http-cookies.html
    """
    # Prefix used by curl/browsers to mark HttpOnly cookies in cookies.txt
    _HTTPONLY_PREFIX = '#HttpOnly_'
    # Number of tab-separated fields in a valid cookies.txt entry
    _ENTRY_LEN = 7
    _HEADER = '''# Netscape HTTP Cookie File
# This file is generated by youtube-dlc. Do not edit.

'''
    # Parsed representation of one cookies.txt line, field order as in the file
    _CookieFileEntry = collections.namedtuple(
        'CookieFileEntry',
        ('domain_name', 'include_subdomains', 'path', 'https_only', 'expires_at', 'name', 'value'))

    def save(self, filename=None, ignore_discard=False, ignore_expires=False):
        """
        Save cookies to a file.

        Most of the code is taken from CPython 3.8 and slightly adapted
        to support cookie files with UTF-8 in both python 2 and 3.
        """
        if filename is None:
            if self.filename is not None:
                filename = self.filename
            else:
                raise ValueError(compat_cookiejar.MISSING_FILENAME_TEXT)

        # Store session cookies with `expires` set to 0 instead of an empty
        # string
        for cookie in self:
            if cookie.expires is None:
                cookie.expires = 0

        with io.open(filename, 'w', encoding='utf-8') as f:
            f.write(self._HEADER)
            now = time.time()
            for cookie in self:
                if not ignore_discard and cookie.discard:
                    continue
                if not ignore_expires and cookie.is_expired(now):
                    continue
                if cookie.secure:
                    secure = 'TRUE'
                else:
                    secure = 'FALSE'
                if cookie.domain.startswith('.'):
                    initial_dot = 'TRUE'
                else:
                    initial_dot = 'FALSE'
                if cookie.expires is not None:
                    expires = compat_str(cookie.expires)
                else:
                    expires = ''
                if cookie.value is None:
                    # cookies.txt regards 'Set-Cookie: foo' as a cookie
                    # with no name, whereas http.cookiejar regards it as a
                    # cookie with no value.
                    name = ''
                    value = cookie.name
                else:
                    name = cookie.name
                    value = cookie.value
                f.write(
                    '\t'.join([cookie.domain, initial_dot, cookie.path,
                               secure, expires, name, value]) + '\n')

    def load(self, filename=None, ignore_discard=False, ignore_expires=False):
        """Load cookies from a file."""
        if filename is None:
            if self.filename is not None:
                filename = self.filename
            else:
                raise ValueError(compat_cookiejar.MISSING_FILENAME_TEXT)

        def prepare_line(line):
            # Strip the HttpOnly marker so the base parser accepts the entry
            if line.startswith(self._HTTPONLY_PREFIX):
                line = line[len(self._HTTPONLY_PREFIX):]
            # comments and empty lines are fine
            if line.startswith('#') or not line.strip():
                return line
            # Validate the entry; malformed lines raise LoadError and are skipped below
            cookie_list = line.split('\t')
            if len(cookie_list) != self._ENTRY_LEN:
                raise compat_cookiejar.LoadError('invalid length %d' % len(cookie_list))
            cookie = self._CookieFileEntry(*cookie_list)
            if cookie.expires_at and not cookie.expires_at.isdigit():
                raise compat_cookiejar.LoadError('invalid expires at %s' % cookie.expires_at)
            return line

        cf = io.StringIO()
        with io.open(filename, encoding='utf-8') as f:
            for line in f:
                try:
                    cf.write(prepare_line(line))
                except compat_cookiejar.LoadError as e:
                    write_string(
                        'WARNING: skipping cookie file entry due to %s: %r\n'
                        % (e, line), sys.stderr)
                    continue
        cf.seek(0)
        self._really_load(cf, filename, ignore_discard, ignore_expires)
        # Session cookies are denoted by either `expires` field set to
        # an empty string or 0. MozillaCookieJar only recognizes the former
        # (see [1]). So we need force the latter to be recognized as session
        # cookies on our own.
        # Session cookies may be important for cookies-based authentication,
        # e.g. usually, when user does not check 'Remember me' check box while
        # logging in on a site, some important cookies are stored as session
        # cookies so that not recognizing them will result in failed login.
        # 1. https://bugs.python.org/issue17164
        for cookie in self:
            # Treat `expires=0` cookies as session cookies
            if cookie.expires == 0:
                cookie.expires = None
                cookie.discard = True
2880
2881
class YoutubeDLCookieProcessor(compat_urllib_request.HTTPCookieProcessor):
    """Cookie processor that also applies cookie handling to HTTPS traffic."""

    def __init__(self, cookiejar=None):
        compat_urllib_request.HTTPCookieProcessor.__init__(self, cookiejar)

    def http_response(self, request, response):
        # NOTE(history): a Python 2 workaround used to live here that
        # percent-encoded non-ASCII Set-Cookie headers before processing,
        # since Python 2 would choke on the next request otherwise (see
        # https://github.com/ytdl-org/youtube-dl/issues/6769). It is
        # currently disabled, so this simply delegates to the base class.
        return compat_urllib_request.HTTPCookieProcessor.http_response(self, request, response)

    https_request = compat_urllib_request.HTTPCookieProcessor.http_request
    https_response = http_response
2904
2905
class YoutubeDLRedirectHandler(compat_urllib_request.HTTPRedirectHandler):
    # On Python 3 this class intentionally adds nothing over the base handler.
    if sys.version_info[0] < 3:
        def redirect_request(self, req, fp, code, msg, headers, newurl):
            # On python 2 urlh.geturl() may sometimes return redirect URL
            # as byte string instead of unicode. This workaround allows
            # to force it always return unicode.
            return compat_urllib_request.HTTPRedirectHandler.redirect_request(self, req, fp, code, msg, headers, compat_str(newurl))
2913
2914
def extract_timezone(date_str):
    """Split a textual date into (utc_offset, remainder).

    Returns a datetime.timedelta for a trailing 'Z' or '+HH:MM'/'-HHMM'
    suffix (zero offset when absent) and the date string with that suffix
    removed.
    """
    m = re.search(
        r'^.{8,}?(?P<tz>Z$| ?(?P<sign>\+|-)(?P<hours>[0-9]{2}):?(?P<minutes>[0-9]{2})$)',
        date_str)
    if m is None:
        return datetime.timedelta(), date_str
    # Chop the timezone designator off the date string
    date_str = date_str[:-len(m.group('tz'))]
    sign = m.group('sign')
    if not sign:  # plain 'Z' (UTC)
        return datetime.timedelta(), date_str
    factor = 1 if sign == '+' else -1
    offset = datetime.timedelta(
        hours=factor * int(m.group('hours')),
        minutes=factor * int(m.group('minutes')))
    return offset, date_str
2931
2932
def parse_iso8601(date_str, delimiter='T', timezone=None):
    """ Return a UNIX timestamp from the given date """
    if date_str is None:
        return None

    # Fractional seconds are not representable by strptime here; drop them
    date_str = re.sub(r'\.[0-9]+', '', date_str)

    if timezone is None:
        timezone, date_str = extract_timezone(date_str)

    date_format = '%Y-%m-%d{0}%H:%M:%S'.format(delimiter)
    try:
        dt = datetime.datetime.strptime(date_str, date_format) - timezone
    except ValueError:
        # Unparseable date -> None (implicit in the original as well)
        return None
    return calendar.timegm(dt.timetuple())
2950
2951
def date_formats(day_first=True):
    """Return the table of known date formats, day-first or month-first."""
    if day_first:
        return DATE_FORMATS_DAY_FIRST
    return DATE_FORMATS_MONTH_FIRST
2954
2955
def unified_strdate(date_str, day_first=True):
    """Return a string with the date in the format YYYYMMDD"""
    if date_str is None:
        return None

    # Commas, AM/PM markers and timezone designators only get in the way
    cleaned = date_str.replace(',', ' ')
    cleaned = re.sub(r'(?i)\s*(?:AM|PM)(?:\s+[A-Z]+)?', '', cleaned)
    _, cleaned = extract_timezone(cleaned)

    result = None
    # NB: deliberately tries every format; a later match overrides an earlier one
    for fmt in date_formats(day_first):
        try:
            result = datetime.datetime.strptime(cleaned, fmt).strftime('%Y%m%d')
        except ValueError:
            pass
    if result is None:
        # Last resort: RFC 2822 style dates
        timetuple = email.utils.parsedate_tz(cleaned)
        if timetuple:
            try:
                result = datetime.datetime(*timetuple[:6]).strftime('%Y%m%d')
            except ValueError:
                pass
    if result is not None:
        return compat_str(result)
2982
2983
def unified_timestamp(date_str, day_first=True):
    """Parse a free-form date/time string into a UNIX timestamp (or None)."""
    if date_str is None:
        return None

    date_str = re.sub(r'[,|]', '', date_str)

    # 12-hour clock: a "PM" marker means 12 hours must be added after parsing
    pm_delta = 12 if re.search(r'(?i)PM', date_str) else 0
    timezone, date_str = extract_timezone(date_str)

    # Remove AM/PM + timezone
    date_str = re.sub(r'(?i)\s*(?:AM|PM)(?:\s+[A-Z]+)?', '', date_str)

    # Remove unrecognized timezones from ISO 8601 alike timestamps
    m = re.search(r'\d{1,2}:\d{1,2}(?:\.\d+)?(?P<tz>\s*[A-Z]+)$', date_str)
    if m:
        date_str = date_str[:-len(m.group('tz'))]

    # Python only supports microseconds, so remove nanoseconds
    m = re.search(r'^([0-9]{4,}-[0-9]{1,2}-[0-9]{1,2}T[0-9]{1,2}:[0-9]{1,2}:[0-9]{1,2}\.[0-9]{6})[0-9]+$', date_str)
    if m:
        date_str = m.group(1)

    # First matching format wins
    for fmt in date_formats(day_first):
        try:
            parsed = datetime.datetime.strptime(date_str, fmt) - timezone + datetime.timedelta(hours=pm_delta)
        except ValueError:
            continue
        return calendar.timegm(parsed.timetuple())
    # Fall back to RFC 2822 parsing
    timetuple = email.utils.parsedate_tz(date_str)
    if timetuple:
        return calendar.timegm(timetuple) + pm_delta * 3600
3015
3016
def determine_ext(url, default_ext='unknown_video'):
    """Guess a file extension from a URL, falling back to *default_ext*."""
    if url is None or '.' not in url:
        return default_ext
    # Drop the query string, then take whatever follows the last dot
    guess = url.partition('?')[0].rpartition('.')[2]
    if re.match(r'^[A-Za-z0-9]+$', guess):
        return guess
    # Try extract ext from URLs like http://example.com/foo/bar.mp4/?download
    stripped = guess.rstrip('/')
    if stripped in KNOWN_EXTENSIONS:
        return stripped
    return default_ext
3028
3029
def subtitles_filename(filename, sub_lang, sub_format, expected_real_ext=None):
    """Build a subtitle file name by inserting '<lang>.<format>' as the extension."""
    sub_ext = '%s.%s' % (sub_lang, sub_format)
    return replace_extension(filename, sub_ext, expected_real_ext)
3032
3033
def date_from_str(date_str):
    """
    Return a datetime object from a string in the format YYYYMMDD or
    (now|today)[+-][0-9](day|week|month|year)(s)?"""
    today = datetime.date.today()
    if date_str in ('now', 'today'):
        return today
    if date_str == 'yesterday':
        return today - datetime.timedelta(days=1)
    match = re.match(r'(now|today)(?P<sign>[+-])(?P<time>\d+)(?P<unit>day|week|month|year)(s)?', date_str)
    if match is None:
        # Plain absolute date
        return datetime.datetime.strptime(date_str, '%Y%m%d').date()
    amount = int(match.group('time'))
    if match.group('sign') == '-':
        amount = -amount
    unit = match.group('unit')
    # Months and years have no fixed length; approximate with 30/365 days
    if unit == 'month':
        unit, amount = 'day', amount * 30
    elif unit == 'year':
        unit, amount = 'day', amount * 365
    return today + datetime.timedelta(**{unit + 's': amount})
3061
3062
def hyphenate_date(date_str):
    """
    Convert a date in 'YYYYMMDD' format to 'YYYY-MM-DD' format"""
    match = re.match(r'^(\d\d\d\d)(\d\d)(\d\d)$', date_str)
    # Anything that is not a plain 8-digit date passes through unchanged
    return '-'.join(match.groups()) if match is not None else date_str
3071
3072
class DateRange(object):
    """Represents a time interval between two dates"""

    def __init__(self, start=None, end=None):
        """start and end must be strings in the format accepted by date"""
        # Missing bounds default to the widest representable range
        self.start = date_from_str(start) if start is not None else datetime.datetime.min.date()
        self.end = date_from_str(end) if end is not None else datetime.datetime.max.date()
        if self.start > self.end:
            raise ValueError('Date range: "%s" , the start date must be before the end date' % self)

    @classmethod
    def day(cls, day):
        """Returns a range that only contains the given day"""
        return cls(day, day)

    def __contains__(self, date):
        """Check if the date is in the range"""
        if not isinstance(date, datetime.date):
            date = date_from_str(date)
        return self.start <= date <= self.end

    def __str__(self):
        return '%s - %s' % (self.start.isoformat(), self.end.isoformat())
3102
3103
def platform_name():
    """ Returns the platform name as a compat_str """
    name = platform.platform()
    if isinstance(name, bytes):
        # Some interpreters hand back bytes; decode with the locale encoding
        name = name.decode(preferredencoding())
    assert isinstance(name, compat_str)
    return name
3112
3113
def _windows_write_string(s, out):
    """ Returns True if the string was written using special methods,
    False if it has yet to be written out."""
    # Adapted from http://stackoverflow.com/a/3259271/35070

    import ctypes
    import ctypes.wintypes

    # fileno -> GetStdHandle id (stdout/stderr only)
    WIN_OUTPUT_IDS = {
        1: -11,
        2: -12,
    }

    try:
        fileno = out.fileno()
    except AttributeError:
        # If the output stream doesn't have a fileno, it's virtual
        return False
    except io.UnsupportedOperation:
        # Some strange Windows pseudo files?
        return False
    if fileno not in WIN_OUTPUT_IDS:
        return False

    GetStdHandle = compat_ctypes_WINFUNCTYPE(
        ctypes.wintypes.HANDLE, ctypes.wintypes.DWORD)(
        ('GetStdHandle', ctypes.windll.kernel32))
    h = GetStdHandle(WIN_OUTPUT_IDS[fileno])

    WriteConsoleW = compat_ctypes_WINFUNCTYPE(
        ctypes.wintypes.BOOL, ctypes.wintypes.HANDLE, ctypes.wintypes.LPWSTR,
        ctypes.wintypes.DWORD, ctypes.POINTER(ctypes.wintypes.DWORD),
        ctypes.wintypes.LPVOID)(('WriteConsoleW', ctypes.windll.kernel32))
    written = ctypes.wintypes.DWORD(0)

    GetFileType = compat_ctypes_WINFUNCTYPE(ctypes.wintypes.DWORD, ctypes.wintypes.DWORD)(('GetFileType', ctypes.windll.kernel32))
    FILE_TYPE_CHAR = 0x0002
    FILE_TYPE_REMOTE = 0x8000
    GetConsoleMode = compat_ctypes_WINFUNCTYPE(
        ctypes.wintypes.BOOL, ctypes.wintypes.HANDLE,
        ctypes.POINTER(ctypes.wintypes.DWORD))(
        ('GetConsoleMode', ctypes.windll.kernel32))
    INVALID_HANDLE_VALUE = ctypes.wintypes.DWORD(-1).value

    def not_a_console(handle):
        # WriteConsoleW only works on genuine console handles; redirected
        # files/pipes must take the regular write path instead
        if handle == INVALID_HANDLE_VALUE or handle is None:
            return True
        return ((GetFileType(handle) & ~FILE_TYPE_REMOTE) != FILE_TYPE_CHAR
                or GetConsoleMode(handle, ctypes.byref(ctypes.wintypes.DWORD())) == 0)

    if not_a_console(h):
        return False

    def next_nonbmp_pos(s):
        # Index of the first character outside the Basic Multilingual Plane
        try:
            return next(i for i, c in enumerate(s) if ord(c) > 0xffff)
        except StopIteration:
            return len(s)

    # Write in chunks; non-BMP characters occupy a UTF-16 surrogate pair
    # (2 code units) and must be written one character at a time
    while s:
        count = min(next_nonbmp_pos(s), 1024)

        ret = WriteConsoleW(
            h, s, count if count else 2, ctypes.byref(written), None)
        if ret == 0:
            raise OSError('Failed to write string')
        if not count:  # We just wrote a non-BMP character
            assert written.value == 2
            s = s[1:]
        else:
            assert written.value > 0
            s = s[written.value:]
    return True
3187
3188
def write_string(s, out=None, encoding=None):
    """Write the text *s* to the stream *out* (default: stderr) and flush.

    Uses the Windows console API where applicable; otherwise encodes to the
    stream's encoding (or *encoding*) with errors ignored.
    """
    if out is None:
        out = sys.stderr
    assert type(s) == compat_str

    # Windows consoles need the WriteConsoleW path for non-ASCII output
    if sys.platform == 'win32' and encoding is None and hasattr(out, 'fileno'):
        if _windows_write_string(s, out):
            return

    if ('b' in getattr(out, 'mode', '')
            or sys.version_info[0] < 3):  # Python 2 lies about mode of sys.stderr
        encoded = s.encode(encoding or preferredencoding(), 'ignore')
        out.write(encoded)
    elif hasattr(out, 'buffer'):
        # Text stream with an underlying binary buffer: encode ourselves
        stream_enc = encoding or getattr(out, 'encoding', None) or preferredencoding()
        out.buffer.write(s.encode(stream_enc, 'ignore'))
    else:
        out.write(s)
    out.flush()
3209
3210
def bytes_to_intlist(bs):
    """Convert a byte string (or similar sequence) into a list of byte values."""
    if not bs:
        return []
    if isinstance(bs[0], int):  # Python 3: indexing bytes already yields ints
        return list(bs)
    return [ord(c) for c in bs]
3218
3219
def intlist_to_bytes(xs):
    """Pack a list of integers (0-255) back into a byte string."""
    if not xs:
        return b''
    # struct packing works identically on Python 2 and 3
    return compat_struct_pack('%dB' % len(xs), *xs)
3224
3225
# Cross-platform file locking
if sys.platform == 'win32':
    import ctypes.wintypes
    import msvcrt

    class OVERLAPPED(ctypes.Structure):
        # Mirrors the Win32 OVERLAPPED structure required by Lock/UnlockFileEx
        _fields_ = [
            ('Internal', ctypes.wintypes.LPVOID),
            ('InternalHigh', ctypes.wintypes.LPVOID),
            ('Offset', ctypes.wintypes.DWORD),
            ('OffsetHigh', ctypes.wintypes.DWORD),
            ('hEvent', ctypes.wintypes.HANDLE),
        ]

    kernel32 = ctypes.windll.kernel32
    LockFileEx = kernel32.LockFileEx
    LockFileEx.argtypes = [
        ctypes.wintypes.HANDLE,     # hFile
        ctypes.wintypes.DWORD,      # dwFlags
        ctypes.wintypes.DWORD,      # dwReserved
        ctypes.wintypes.DWORD,      # nNumberOfBytesToLockLow
        ctypes.wintypes.DWORD,      # nNumberOfBytesToLockHigh
        ctypes.POINTER(OVERLAPPED)  # Overlapped
    ]
    LockFileEx.restype = ctypes.wintypes.BOOL
    UnlockFileEx = kernel32.UnlockFileEx
    UnlockFileEx.argtypes = [
        ctypes.wintypes.HANDLE,     # hFile
        ctypes.wintypes.DWORD,      # dwReserved
        ctypes.wintypes.DWORD,      # nNumberOfBytesToLockLow
        ctypes.wintypes.DWORD,      # nNumberOfBytesToLockHigh
        ctypes.POINTER(OVERLAPPED)  # Overlapped
    ]
    UnlockFileEx.restype = ctypes.wintypes.BOOL
    # Lock the whole file: low/high DWORDs of the byte-range length
    whole_low = 0xffffffff
    whole_high = 0x7fffffff

    def _lock_file(f, exclusive):
        # Acquire a byte-range lock over the entire file via LockFileEx;
        # 0x2 selects an exclusive lock, 0x0 a shared one
        overlapped = OVERLAPPED()
        overlapped.Offset = 0
        overlapped.OffsetHigh = 0
        overlapped.hEvent = 0
        f._lock_file_overlapped_p = ctypes.pointer(overlapped)
        handle = msvcrt.get_osfhandle(f.fileno())
        if not LockFileEx(handle, 0x2 if exclusive else 0x0, 0,
                          whole_low, whole_high, f._lock_file_overlapped_p):
            raise OSError('Locking file failed: %r' % ctypes.FormatError())

    def _unlock_file(f):
        # Release the range locked by _lock_file (same OVERLAPPED pointer)
        assert f._lock_file_overlapped_p
        handle = msvcrt.get_osfhandle(f.fileno())
        if not UnlockFileEx(handle, 0,
                            whole_low, whole_high, f._lock_file_overlapped_p):
            raise OSError('Unlocking file failed: %r' % ctypes.FormatError())

else:
    # Some platforms, such as Jython, is missing fcntl
    try:
        import fcntl

        def _lock_file(f, exclusive):
            fcntl.flock(f, fcntl.LOCK_EX if exclusive else fcntl.LOCK_SH)

        def _unlock_file(f):
            fcntl.flock(f, fcntl.LOCK_UN)
    except ImportError:
        UNSUPPORTED_MSG = 'file locking is not supported on this platform'

        def _lock_file(f, exclusive):
            raise IOError(UNSUPPORTED_MSG)

        def _unlock_file(f):
            raise IOError(UNSUPPORTED_MSG)
3299
3300
class locked_file(object):
    """Context manager opening *filename* and holding an advisory lock.

    Read mode takes a shared lock, write/append modes an exclusive one.
    """

    def __init__(self, filename, mode, encoding=None):
        assert mode in ['r', 'a', 'w']
        self.f = io.open(filename, mode, encoding=encoding)
        self.mode = mode

    def __enter__(self):
        want_exclusive = self.mode != 'r'
        try:
            _lock_file(self.f, want_exclusive)
        except IOError:
            # Never leak the file handle if locking fails
            self.f.close()
            raise
        return self

    def __exit__(self, exc_type, exc_value, tb):
        try:
            _unlock_file(self.f)
        finally:
            self.f.close()

    def __iter__(self):
        return iter(self.f)

    def read(self, *args):
        return self.f.read(*args)

    def write(self, *args):
        return self.f.write(*args)
3330
3331
def get_filesystem_encoding():
    """Name of the encoding used for file system paths, defaulting to UTF-8."""
    encoding = sys.getfilesystemencoding()
    if encoding is None:  # can happen on Python 2
        return 'utf-8'
    return encoding
3335
3336
def shell_quote(args):
    """Return the arguments joined into a single shell-escaped string."""
    encoding = get_filesystem_encoding()

    def _as_text(a):
        # We may get a filename encoded with 'encodeFilename'; decode it first
        return a.decode(encoding) if isinstance(a, bytes) else a

    return ' '.join(compat_shlex_quote(_as_text(a)) for a in args)
3346
3347
def smuggle_url(url, data):
    """ Pass additional data in a URL for internal use. """
    # Merge with any data already smuggled into the URL
    url, existing = unsmuggle_url(url, {})
    data.update(existing)
    sdata = compat_urllib_parse_urlencode(
        {'__youtubedl_smuggle': json.dumps(data)})
    return '%s#%s' % (url, sdata)
3356
3357
def unsmuggle_url(smug_url, default=None):
    """Inverse of smuggle_url: return (bare_url, smuggled_data_or_default)."""
    if '#__youtubedl_smuggle' not in smug_url:
        return smug_url, default
    url, _, sdata = smug_url.rpartition('#')
    payload = compat_parse_qs(sdata)['__youtubedl_smuggle'][0]
    return url, json.loads(payload)
3365
3366
def format_bytes(bytes):
    """Format a byte count as a human-readable string like '1.50KiB'."""
    if bytes is None:
        return 'N/A'
    if type(bytes) is str:
        bytes = float(bytes)
    # log base 1024 picks the binary-prefix bucket; 0 gets its own case
    exponent = 0 if bytes == 0.0 else int(math.log(bytes, 1024.0))
    suffix = ['B', 'KiB', 'MiB', 'GiB', 'TiB', 'PiB', 'EiB', 'ZiB', 'YiB'][exponent]
    converted = float(bytes) / float(1024 ** exponent)
    return '%.2f%s' % (converted, suffix)
3379
3380
def lookup_unit_table(unit_table, s):
    """Parse '<number> <unit>' at the start of *s* into an int, or None."""
    units_re = '|'.join(re.escape(u) for u in unit_table)
    m = re.match(
        r'(?P<num>[0-9]+(?:[,.][0-9]*)?)\s*(?P<unit>%s)\b' % units_re, s)
    if m is None:
        return None
    # Accept European-style decimal commas as well
    number = float(m.group('num').replace(',', '.'))
    return int(number * unit_table[m.group('unit')])
3390
3391
def parse_filesize(s):
    """Parse a human file size like '5.6 MiB' or '1,5GB' into bytes (int) or None."""
    if s is None:
        return None

    # The lower-case forms are of course incorrect and unofficial,
    # but we support those too. For every SI prefix the accepted spellings
    # and their multipliers are (K shown as example):
    #   KiB -> 1024^n   KB -> 1000^n   kB -> 1024^n (sic, historical)
    #   Kb  -> 1000^n   kb -> 1000^n
    #   kilobytes -> 1000^n   kibibytes -> 1024^n
    _UNIT_TABLE = {
        'B': 1,
        'b': 1,
        'bytes': 1,
    }
    _PREFIXES = [
        ('K', 'kilo', 'kibi'),
        ('M', 'mega', 'mebi'),
        ('G', 'giga', 'gibi'),
        ('T', 'tera', 'tebi'),
        ('P', 'peta', 'pebi'),
        ('E', 'exa', 'exbi'),
        ('Z', 'zetta', 'zebi'),
        ('Y', 'yotta', 'yobi'),
    ]
    for exp, (letter, dec_name, bin_name) in enumerate(_PREFIXES, start=1):
        decimal, binary = 1000 ** exp, 1024 ** exp
        lower = letter.lower()
        _UNIT_TABLE[letter + 'iB'] = binary
        _UNIT_TABLE[letter + 'B'] = decimal
        _UNIT_TABLE[lower + 'B'] = binary
        _UNIT_TABLE[letter + 'b'] = decimal
        _UNIT_TABLE[lower + 'b'] = decimal
        _UNIT_TABLE[dec_name + 'bytes'] = decimal
        _UNIT_TABLE[bin_name + 'bytes'] = binary

    return lookup_unit_table(_UNIT_TABLE, s)
3461
3462
def parse_count(s):
    """Parse a view/like count such as '1.2M' or '12,345' into an int, or None."""
    if s is None:
        return None

    s = s.strip()

    # A plain number (possibly with separators) needs no unit table
    if re.match(r'^[\d,.]+$', s):
        return str_to_int(s)

    _UNIT_TABLE = {
        'k': 1000,
        'K': 1000,
        'm': 1000 ** 2,
        'M': 1000 ** 2,
        'kk': 1000 ** 2,
        'KK': 1000 ** 2,
    }
    return lookup_unit_table(_UNIT_TABLE, s)
3482
3483
def parse_resolution(s):
    """Extract width/height from strings like '1920x1080', '720p' or '4k'."""
    if s is None:
        return {}

    m = re.search(r'\b(?P<w>\d+)\s*[xX×]\s*(?P<h>\d+)\b', s)
    if m:
        return {'width': int(m.group('w')), 'height': int(m.group('h'))}

    m = re.search(r'\b(\d+)[pPiI]\b', s)
    if m:
        return {'height': int(m.group(1))}

    m = re.search(r'\b([48])[kK]\b', s)
    if m:
        # 4k -> 2160, 8k -> 4320
        return {'height': int(m.group(1)) * 540}

    return {}
3504
3505
def parse_bitrate(s):
    """Extract an integer kbps value from a string like '320 kbps', else None."""
    if not isinstance(s, compat_str):
        return
    m = re.search(r'\b(\d+)\s*kbps', s)
    return int(m.group(1)) if m else None
3512
3513
def month_by_name(name, lang='en'):
    """ Return the number of a month by (locale-independently) English name """
    # Unknown languages fall back to the English table
    names = MONTH_NAMES.get(lang, MONTH_NAMES['en'])
    try:
        return names.index(name) + 1
    except ValueError:
        return None
3523
3524
def month_by_abbreviation(abbrev):
    """ Return the number of a month by (locale-independently) English
    abbreviations """
    abbreviations = [s[:3] for s in ENGLISH_MONTH_NAMES]
    try:
        return abbreviations.index(abbrev) + 1
    except ValueError:
        return None
3533
3534
def fix_xml_ampersands(xml_str):
    """Replace all the '&' by '&amp;' in XML"""
    # Leave existing entities (named and numeric references) untouched
    bare_amp = r'&(?!amp;|lt;|gt;|apos;|quot;|#x[0-9a-fA-F]{,4};|#[0-9]{,4};)'
    return re.sub(bare_amp, '&amp;', xml_str)
3541
3542
def setproctitle(title):
    """Best-effort: set the process name shown by ps/top via glibc prctl().

    Silently does nothing when libc or prctl is unavailable.
    """
    assert isinstance(title, compat_str)

    # ctypes in Jython is not complete
    # http://bugs.jython.org/issue2148
    if sys.platform.startswith('java'):
        return

    try:
        libc = ctypes.cdll.LoadLibrary('libc.so.6')
    except OSError:
        return
    except TypeError:
        # LoadLibrary in Windows Python 2.7.13 only expects
        # a bytestring, but since unicode_literals turns
        # every string into a unicode string, it fails.
        return
    title_bytes = title.encode('utf-8')
    buf = ctypes.create_string_buffer(len(title_bytes))
    buf.value = title_bytes
    try:
        # 15 is PR_SET_NAME
        libc.prctl(15, buf, 0, 0, 0)
    except AttributeError:
        return  # Strange libc, just skip this
3567
3568
def remove_start(s, start):
    """Strip *start* from the beginning of *s* when present; None passes through."""
    if s is None or not s.startswith(start):
        return s
    return s[len(start):]
3571
3572
def remove_end(s, end):
    """Strip *end* from the end of *s* when present; None passes through."""
    if s is None or not s.endswith(end):
        return s
    return s[:-len(end)]
3575
3576
def remove_quotes(s):
    """Strip one matching pair of surrounding single or double quotes."""
    if s is None or len(s) < 2:
        return s
    if s[0] == s[-1] and s[0] in ('"', "'"):
        return s[1:-1]
    return s
3584
3585
def get_domain(url):
    """Extract the bare domain (scheme and 'www.' stripped) or None."""
    m = re.match(r'(?:https?:\/\/)?(?:www\.)?(?P<domain>[^\n\/]+\.[^\n\/]+)(?:\/(.*))?', url)
    if m is None:
        return None
    return m.group('domain')
3589
3590
def url_basename(url):
    """Return the last component of the URL path ('' for a bare domain)."""
    components = compat_urlparse.urlparse(url).path.strip('/').split('/')
    return components[-1]
3594
3595
def base_url(url):
    """Return everything up to (and including) the last '/' before query/fragment.

    NOTE(review): raises AttributeError when no such prefix matches — callers
    appear to rely on well-formed absolute URLs.
    """
    m = re.match(r'https?://[^?#&]+/', url)
    return m.group()
3598
3599
def urljoin(base, path):
    """Join *base* and *path* defensively; returns None on unusable input."""
    if isinstance(path, bytes):
        path = path.decode('utf-8')
    if not isinstance(path, compat_str) or not path:
        return None
    # Absolute (or scheme-relative) paths need no base at all
    if re.match(r'^(?:[a-zA-Z][a-zA-Z0-9+-.]*:)?//', path):
        return path
    if isinstance(base, bytes):
        base = base.decode('utf-8')
    if not isinstance(base, compat_str) or not re.match(
            r'^(?:https?:)?//', base):
        return None
    return compat_urlparse.urljoin(base, path)
3613
3614
class HEADRequest(compat_urllib_request.Request):
    """Request subclass that forces the HTTP HEAD verb."""
    def get_method(self):
        return 'HEAD'
3618
3619
class PUTRequest(compat_urllib_request.Request):
    """Request subclass that forces the HTTP PUT verb."""
    def get_method(self):
        return 'PUT'
3623
3624
def int_or_none(v, scale=1, default=None, get_attr=None, invscale=1):
    """Coerce *v* (optionally one of its attributes) to int, scaled by invscale/scale.

    Returns *default* for None, empty string, or unconvertible values.
    """
    if get_attr and v is not None:
        v = getattr(v, get_attr, None)
    if v is None or v == '':
        return default
    try:
        return int(v) * invscale // scale
    except (ValueError, TypeError):
        return default
3637
3638
def str_or_none(v, default=None):
    """Stringify *v*; pass *default* through for None."""
    if v is None:
        return default
    return compat_str(v)
3641
3642
def str_to_int(int_str):
    """ A more relaxed version of int_or_none """
    # Integers pass straight through; strings have thousands separators removed
    if isinstance(int_str, compat_integer_types):
        return int_str
    if isinstance(int_str, compat_str):
        int_str = re.sub(r'[,\.\+]', '', int_str)
    return int_or_none(int_str)
3650
3651
def float_or_none(v, scale=1, invscale=1, default=None):
    """Coerce *v* to float, scaled by invscale/scale; *default* on failure/None."""
    if v is None:
        return default
    try:
        result = float(v) * invscale / scale
    except (ValueError, TypeError):
        return default
    return result
3659
3660
def bool_or_none(v, default=None):
    """Return *v* only when it is a genuine bool; otherwise *default*."""
    if isinstance(v, bool):
        return v
    return default
3663
3664
def strip_or_none(v, default=None):
    """Return ``v.strip()`` for string input; anything else yields *default*."""
    if isinstance(v, compat_str):
        return v.strip()
    return default
3667
3668
def url_or_none(url):
    """Return the stripped *url* when it looks like a supported URL, else None."""
    if not url or not isinstance(url, compat_str):
        return None
    url = url.strip()
    if re.match(r'^(?:(?:https?|rt(?:m(?:pt?[es]?|fp)|sp[su]?)|mms|ftps?):)?//', url):
        return url
    return None
3674
3675
def parse_duration(s):
    """Parse a duration string into seconds (float), or None when unparsable.

    Accepted formats (tried in order):
      * clock-style  ``[[[DD:]HH:]MM:]SS[.ms]``
      * ISO-8601-ish ``PnYnMnWnDTnHnMnS`` and free text like ``3 hours 5 min``
      * decimal      ``X hours`` / ``X mins``
    """
    if not isinstance(s, compat_basestring):
        return None

    s = s.strip()

    days, hours, mins, secs, ms = [None] * 5
    # Clock-style durations, e.g. '1:23:45.6'
    m = re.match(r'(?:(?:(?:(?P<days>[0-9]+):)?(?P<hours>[0-9]+):)?(?P<mins>[0-9]+):)?(?P<secs>[0-9]+)(?P<ms>\.[0-9]+)?Z?$', s)
    if m:
        days, hours, mins, secs, ms = m.groups()
    else:
        # ISO-8601-like and worded durations; years/months/weeks are matched
        # but intentionally discarded (no capture groups)
        m = re.match(
            r'''(?ix)(?:P?
                (?:
                    [0-9]+\s*y(?:ears?)?\s*
                )?
                (?:
                    [0-9]+\s*m(?:onths?)?\s*
                )?
                (?:
                    [0-9]+\s*w(?:eeks?)?\s*
                )?
                (?:
                    (?P<days>[0-9]+)\s*d(?:ays?)?\s*
                )?
                T)?
                (?:
                    (?P<hours>[0-9]+)\s*h(?:ours?)?\s*
                )?
                (?:
                    (?P<mins>[0-9]+)\s*m(?:in(?:ute)?s?)?\s*
                )?
                (?:
                    (?P<secs>[0-9]+)(?P<ms>\.[0-9]+)?\s*s(?:ec(?:ond)?s?)?\s*
                )?Z?$''', s)
        if m:
            days, hours, mins, secs, ms = m.groups()
        else:
            # Decimal hour/minute forms, e.g. '2.5 hours'
            m = re.match(r'(?i)(?:(?P<hours>[0-9.]+)\s*(?:hours?)|(?P<mins>[0-9.]+)\s*(?:mins?\.?|minutes?)\s*)Z?$', s)
            if m:
                hours, mins = m.groups()
            else:
                return None

    # Sum up whatever components were captured
    duration = 0
    if secs:
        duration += float(secs)
    if mins:
        duration += float(mins) * 60
    if hours:
        duration += float(hours) * 60 * 60
    if days:
        duration += float(days) * 24 * 60 * 60
    if ms:
        duration += float(ms)
    return duration
3732
3733
def prepend_extension(filename, ext, expected_real_ext=None):
    """Insert *ext* in front of the real extension of *filename*.

    When *expected_real_ext* is given but does not match the actual
    extension, *ext* is appended after the whole filename instead.
    """
    name, real_ext = os.path.splitext(filename)
    if expected_real_ext and real_ext[1:] != expected_real_ext:
        return '{0}.{1}'.format(filename, ext)
    return '{0}.{1}{2}'.format(name, ext, real_ext)
3740
3741
def replace_extension(filename, ext, expected_real_ext=None):
    """Replace the extension of *filename* with *ext*.

    When *expected_real_ext* is given but does not match the actual
    extension, *ext* is appended to the whole filename instead.
    """
    name, real_ext = os.path.splitext(filename)
    if expected_real_ext and real_ext[1:] != expected_real_ext:
        base = filename
    else:
        base = name
    return '{0}.{1}'.format(base, ext)
3747
3748
def check_executable(exe, args=None):
    """ Checks if the given binary is installed somewhere in PATH, and returns its name.
    args can be a list of arguments for a short output (like -version) """
    # NOTE: the default was a mutable list literal (args=[]); use None as the
    # sentinel instead (standard Python idiom, avoids shared-state surprises).
    try:
        process_communicate_or_kill(subprocess.Popen(
            [exe] + list(args or []), stdout=subprocess.PIPE, stderr=subprocess.PIPE))
    except OSError:
        # Binary not found (or not executable) anywhere in PATH
        return False
    return exe
3758
3759
def get_exe_version(exe, args=['--version'],
                    version_re=None, unrecognized='present'):
    """ Returns the version of the specified executable,
    or False if the executable is not present """
    # args: extra argv to make the program print a short version banner
    # version_re: optional regex for extracting the version from the output
    # unrecognized: value returned when the program runs but no version matches
    try:
        # STDIN should be redirected too. On UNIX-like systems, ffmpeg triggers
        # SIGTTOU if youtube-dlc is run in the background.
        # See https://github.com/ytdl-org/youtube-dl/issues/955#issuecomment-209789656
        out, _ = process_communicate_or_kill(subprocess.Popen(
            [encodeArgument(exe)] + args,
            stdin=subprocess.PIPE,
            stdout=subprocess.PIPE, stderr=subprocess.STDOUT))
    except OSError:
        return False
    if isinstance(out, bytes):  # Python 2.x
        out = out.decode('ascii', 'ignore')
    return detect_exe_version(out, version_re, unrecognized)
3777
3778
def detect_exe_version(output, version_re=None, unrecognized='present'):
    """Search program *output* for a version string; *unrecognized* when absent."""
    assert isinstance(output, compat_str)
    if version_re is None:
        # Typical '... version 1.2.3 ...' banner
        version_re = r'version\s+([-0-9._a-zA-Z]+)'
    mobj = re.search(version_re, output)
    return mobj.group(1) if mobj else unrecognized
3788
3789
class PagedList(object):
    """Base class for lazily-fetched paginated result lists.

    Subclasses are expected to implement ``getslice(start, end)``.
    """

    def __len__(self):
        # This is only useful for tests
        return len(self.getslice())
3794
3795
class OnDemandPagedList(PagedList):
    """PagedList that fetches pages on demand via *pagefunc*, with optional caching."""

    def __init__(self, pagefunc, pagesize, use_cache=True):
        # pagefunc: callable(pagenum) -> iterable of entries for that page
        # pagesize: number of entries in a full page
        self._pagefunc = pagefunc
        self._pagesize = pagesize
        self._use_cache = use_cache
        if use_cache:
            self._cache = {}

    def getslice(self, start=0, end=None):
        """Return the entries in [start, end), fetching only the needed pages."""
        res = []
        for pagenum in itertools.count(start // self._pagesize):
            firstid = pagenum * self._pagesize
            nextfirstid = pagenum * self._pagesize + self._pagesize
            if start >= nextfirstid:
                continue

            page_results = None
            if self._use_cache:
                page_results = self._cache.get(pagenum)
            if page_results is None:
                page_results = list(self._pagefunc(pagenum))
            if self._use_cache:
                self._cache[pagenum] = page_results

            # Index of the first wanted entry within this page
            startv = (
                start % self._pagesize
                if firstid <= start < nextfirstid
                else 0)

            # Index one past the last wanted entry, when *end* falls on this page
            endv = (
                ((end - 1) % self._pagesize) + 1
                if (end is not None and firstid <= end <= nextfirstid)
                else None)

            if startv != 0 or endv is not None:
                page_results = page_results[startv:endv]
            res.extend(page_results)

            # A little optimization - if current page is not "full", ie. does
            # not contain page_size videos then we can assume that this page
            # is the last one - there are no more ids on further pages -
            # i.e. no need to query again.
            if len(page_results) + startv < self._pagesize:
                break

            # If we got the whole page, but the next page is not interesting,
            # break out early as well
            if end == nextfirstid:
                break
        return res
3846
3847
class InAdvancePagedList(PagedList):
    """PagedList whose total page count is known up front."""

    def __init__(self, pagefunc, pagecount, pagesize):
        # pagefunc: callable(pagenum) -> iterable of entries for that page
        # pagecount: total number of pages available
        # pagesize: number of entries in a full page
        self._pagefunc = pagefunc
        self._pagecount = pagecount
        self._pagesize = pagesize

    def getslice(self, start=0, end=None):
        """Return the entries in [start, end) by iterating the relevant pages."""
        res = []
        start_page = start // self._pagesize
        end_page = (
            self._pagecount if end is None else (end // self._pagesize + 1))
        # Entries to drop from the first page, and total entries still wanted
        skip_elems = start - start_page * self._pagesize
        only_more = None if end is None else end - start
        for pagenum in range(start_page, end_page):
            page = list(self._pagefunc(pagenum))
            if skip_elems:
                page = page[skip_elems:]
                skip_elems = None
            if only_more is not None:
                if len(page) < only_more:
                    only_more -= len(page)
                else:
                    # This page satisfies the request; truncate and stop
                    page = page[:only_more]
                    res.extend(page)
                    break
            res.extend(page)
        return res
3875
3876
def uppercase_escape(s):
    """Decode literal ``\\UXXXXXXXX`` escape sequences embedded in *s*."""
    decode = codecs.getdecoder('unicode_escape')

    def _expand(m):
        return decode(m.group(0))[0]

    return re.sub(r'\\U[0-9a-fA-F]{8}', _expand, s)
3883
3884
def lowercase_escape(s):
    """Decode literal ``\\uXXXX`` escape sequences embedded in *s*."""
    decode = codecs.getdecoder('unicode_escape')

    def _expand(m):
        return decode(m.group(0))[0]

    return re.sub(r'\\u[0-9a-fA-F]{4}', _expand, s)
3891
3892
def escape_rfc3986(s):
    """Escape non-ASCII characters as suggested by RFC 3986"""
    # Python 2 unicode strings must be UTF-8 encoded before quoting
    needs_encoding = sys.version_info < (3, 0) and isinstance(s, compat_str)
    if needs_encoding:
        s = s.encode('utf-8')
    return compat_urllib_parse.quote(s, b"%/;:@&=+$,!~*'()?#[]")
3898
3899
def escape_url(url):
    """Escape URL as suggested by RFC 3986"""
    parsed = compat_urllib_parse_urlparse(url)
    # IDNA-encode the host; percent-escape every other component
    escaped = parsed._replace(
        netloc=parsed.netloc.encode('idna').decode('ascii'),
        path=escape_rfc3986(parsed.path),
        params=escape_rfc3986(parsed.params),
        query=escape_rfc3986(parsed.query),
        fragment=escape_rfc3986(parsed.fragment))
    return escaped.geturl()
3910
3911
def read_batch_urls(batch_fd):
    """Read a batch-file object and return its list of cleaned-up URLs."""
    def _clean(line):
        if not isinstance(line, compat_str):
            line = line.decode('utf-8', 'replace')
        # Drop any UTF-8 BOM, raw or already decoded
        for bom in ('\xef\xbb\xbf', '\ufeff'):
            if line.startswith(bom):
                line = line[len(bom):]
        line = line.lstrip()
        if not line or line.startswith(('#', ';', ']')):
            return False
        # "#" cannot be stripped out since it is part of the URI
        # However, it can be safely stripped out if following a whitespace
        return re.split(r'\s#', line, 1)[0].rstrip()

    with contextlib.closing(batch_fd) as fd:
        return [url for url in map(_clean, fd) if url]
3929
3930
def urlencode_postdata(*args, **kargs):
    """URL-encode form data and return it as ASCII bytes suitable for a POST body."""
    return compat_urllib_parse_urlencode(*args, **kargs).encode('ascii')
3933
3934
def update_url_query(url, query):
    """Merge the key/value pairs of *query* into the query string of *url*."""
    if not query:
        return url
    parsed = compat_urlparse.urlparse(url)
    params = compat_parse_qs(parsed.query)
    params.update(query)
    new_query = compat_urllib_parse_urlencode(params, True)
    return compat_urlparse.urlunparse(parsed._replace(query=new_query))
3943
3944
def update_Request(req, url=None, data=None, headers={}, query={}):
    """Clone *req*, optionally overriding its URL, body data, headers and query.

    The concrete request class (GET/HEAD/PUT) of the original is preserved.
    NOTE(review): headers/query use mutable defaults; they are only read here.
    """
    req_headers = req.headers.copy()
    req_headers.update(headers)
    req_data = data or req.data
    req_url = update_url_query(url or req.get_full_url(), query)
    req_get_method = req.get_method()
    if req_get_method == 'HEAD':
        req_type = HEADRequest
    elif req_get_method == 'PUT':
        req_type = PUTRequest
    else:
        req_type = compat_urllib_request.Request
    new_req = req_type(
        req_url, data=req_data, headers=req_headers,
        origin_req_host=req.origin_req_host, unverifiable=req.unverifiable)
    # Not all request objects carry a timeout attribute
    if hasattr(req, 'timeout'):
        new_req.timeout = req.timeout
    return new_req
3963
3964
def _multipart_encode_impl(data, boundary):
    """Serialize the *data* dict into a multipart/form-data body using *boundary*.

    Raises ValueError when the boundary occurs inside a field name or value.
    """
    content_type = 'multipart/form-data; boundary=%s' % boundary

    boundary_bytes = boundary.encode('ascii')
    out = b''
    for name, value in data.items():
        out += b'--' + boundary_bytes + b'\r\n'
        if isinstance(name, compat_str):
            name = name.encode('utf-8')
        if isinstance(value, compat_str):
            value = value.encode('utf-8')
        # RFC 2047 requires non-ASCII field names to be encoded, while RFC 7578
        # suggests sending UTF-8 directly. Firefox sends UTF-8, too
        part = b'Content-Disposition: form-data; name="' + name + b'"\r\n\r\n' + value + b'\r\n'
        if boundary_bytes in part:
            raise ValueError('Boundary overlaps with data')
        out += part

    out += b'--' + boundary_bytes + b'--\r\n'

    return out, content_type
3985
3986
def multipart_encode(data, boundary=None):
    '''
    Encode a dict to RFC 7578-compliant form-data

    data:
        A dict where keys and values can be either Unicode or bytes-like
        objects.
    boundary:
        If specified a Unicode object, it's used as the boundary. Otherwise
        a random boundary is generated.

    Reference: https://tools.ietf.org/html/rfc7578
    '''
    has_specified_boundary = boundary is not None

    # Retry with fresh random boundaries until one does not collide with the
    # payload; a caller-supplied boundary that collides raises instead.
    while True:
        if boundary is None:
            boundary = '---------------' + str(random.randrange(0x0fffffff, 0xffffffff))
        try:
            return _multipart_encode_impl(data, boundary)
        except ValueError:
            if has_specified_boundary:
                raise
            boundary = None
4015
4016
def dict_get(d, key_or_keys, default=None, skip_false_values=True):
    """Fetch the first usable value from *d* for one key or a sequence of keys.

    None values are always skipped; falsy values too unless
    skip_false_values is False.
    """
    if not isinstance(key_or_keys, (list, tuple)):
        return d.get(key_or_keys, default)
    for key in key_or_keys:
        value = d.get(key)
        if value is None:
            continue
        if skip_false_values and not value:
            continue
        return value
    return default
4025
4026
def try_get(src, getter, expected_type=None):
    """Apply each getter to *src* and return the first result of the expected type.

    Lookup errors (missing attribute/key/index, wrong type) are swallowed;
    returns None implicitly when nothing matches.
    """
    getters = getter if isinstance(getter, (list, tuple)) else [getter]
    for fn in getters:
        try:
            result = fn(src)
        except (AttributeError, KeyError, TypeError, IndexError):
            continue
        if expected_type is None or isinstance(result, expected_type):
            return result
4038
4039
def merge_dicts(*dicts):
    """Merge dicts left-to-right; a later non-empty string may replace an
    earlier empty-string value, but never an earlier substantive one."""
    merged = {}
    for a_dict in dicts:
        for k, v in a_dict.items():
            if v is None:
                continue
            is_new_key = k not in merged
            fills_empty_str = (
                not is_new_key
                and isinstance(v, compat_str) and v
                and isinstance(merged[k], compat_str)
                and not merged[k])
            if is_new_key or fills_empty_str:
                merged[k] = v
    return merged
4052
4053
def encode_compat_str(string, encoding=preferredencoding(), errors='strict'):
    """Return *string* as compat_str, decoding byte strings with *encoding*.

    NOTE(review): the default *encoding* is evaluated once, at import time.
    """
    return string if isinstance(string, compat_str) else compat_str(string, encoding, errors)
4056
4057
# US MPAA movie ratings mapped to a minimum viewer age
US_RATINGS = {
    'G': 0,
    'PG': 10,
    'PG-13': 13,
    'R': 16,
    'NC': 18,
}


# US TV Parental Guidelines ratings mapped to a minimum viewer age
TV_PARENTAL_GUIDELINES = {
    'TV-Y': 0,
    'TV-Y7': 7,
    'TV-G': 0,
    'TV-PG': 0,
    'TV-14': 14,
    'TV-MA': 17,
}
4075
4076
def parse_age_limit(s):
    """Normalize an age limit (int, 'NN+', US rating or TV rating) to an int age."""
    if type(s) == int:
        return s if 0 <= s <= 21 else None
    if not isinstance(s, compat_basestring):
        return None
    mobj = re.match(r'^(?P<age>\d{1,2})\+?$', s)
    if mobj:
        return int(mobj.group('age'))
    if s in US_RATINGS:
        return US_RATINGS[s]
    # Accept 'TV-14', 'TV_14' and bare 'TV14' spellings
    mobj = re.match(r'^TV[_-]?(%s)$' % '|'.join(k[3:] for k in TV_PARENTAL_GUIDELINES), s)
    if mobj:
        return TV_PARENTAL_GUIDELINES['TV-' + mobj.group(1)]
    return None
4091
4092
def strip_jsonp(code):
    """Remove a JSONP callback wrapper, leaving only the JSON payload."""
    JSONP_WRAPPER_RE = r'''(?sx)^
        (?:window\.)?(?P<func_name>[a-zA-Z0-9_.$]*)
        (?:\s*&&\s*(?P=func_name))?
        \s*\(\s*(?P<callback_data>.*)\);?
        \s*?(?://[^\n]*)*$'''
    return re.sub(JSONP_WRAPPER_RE, r'\g<callback_data>', code)
4101
4102
def js_to_json(code, vars={}):
    """Convert a JavaScript object literal in *code* to valid JSON text.

    Handles single quotes, unquoted keys, comments, trailing commas, hex and
    octal integers, and substitutes bare identifiers found in *vars*
    (a read-only dict of var -> replacement text).
    """
    # vars is a dict of var, val pairs to substitute
    COMMENT_RE = r'/\*(?:(?!\*/).)*?\*/|//[^\n]*'
    SKIP_RE = r'\s*(?:{comment})?\s*'.format(comment=COMMENT_RE)
    # (pattern, numeric base) for hex and octal integer literals
    INTEGER_TABLE = (
        (r'(?s)^(0[xX][0-9a-fA-F]+){skip}:?$'.format(skip=SKIP_RE), 16),
        (r'(?s)^(0+[0-7]+){skip}:?$'.format(skip=SKIP_RE), 8),
    )

    def fix_kv(m):
        # Rewrite one matched token into its JSON equivalent
        v = m.group(0)
        if v in ('true', 'false', 'null'):
            return v
        elif v.startswith('/*') or v.startswith('//') or v.startswith('!') or v == ',':
            return ""

        if v[0] in ("'", '"'):
            # Normalize string escapes to JSON-compatible ones
            v = re.sub(r'(?s)\\.|"', lambda m: {
                '"': '\\"',
                "\\'": "'",
                '\\\n': '',
                '\\x': '\\u00',
            }.get(m.group(0), m.group(0)), v[1:-1])
        else:
            # Bare token: try hex/octal integer (possibly used as an object key)
            for regex, base in INTEGER_TABLE:
                im = re.match(regex, v)
                if im:
                    i = int(im.group(1), base)
                    return '"%d":' % i if v.endswith(':') else '%d' % i

        if v in vars:
            return vars[v]

        # Fallback: quote the bare identifier
        return '"%s"' % v

    return re.sub(r'''(?sx)
        "(?:[^"\\]*(?:\\\\|\\['"nurtbfx/\n]))*[^"\\]*"|
        '(?:[^'\\]*(?:\\\\|\\['"nurtbfx/\n]))*[^'\\]*'|
        {comment}|,(?={skip}[\]}}])|
        (?:(?<![0-9])[eE]|[a-df-zA-DF-Z_])[.a-zA-Z_0-9]*|
        \b(?:0[xX][0-9a-fA-F]+|0+[0-7]+)(?:{skip}:)?|
        [0-9]+(?={skip}:)|
        !+
        '''.format(comment=COMMENT_RE, skip=SKIP_RE), fix_kv, code)
4147
4148
def qualities(quality_ids):
    """ Get a numeric quality value out of a list of possible values """
    def q(qid):
        # Position in the list is the quality rank; unknown ids rank lowest
        if qid in quality_ids:
            return quality_ids.index(qid)
        return -1
    return q
4157
4158
# Default output filename template: "<title> [<id>].<ext>"
DEFAULT_OUTTMPL = '%(title)s [%(id)s].%(ext)s'
4160
4161
def limit_length(s, length):
    """ Add ellipses to overly long strings """
    if s is None:
        return None
    ELLIPSES = '...'
    if len(s) <= length:
        return s
    return s[:length - len(ELLIPSES)] + ELLIPSES
4170
4171
def version_tuple(v):
    """Split a dotted/dashed version string into a tuple of ints."""
    parts = re.split(r'[-.]', v)
    return tuple(int(part) for part in parts)
4174
4175
def is_outdated_version(version, limit, assume_new=True):
    """Return True when *version* is older than *limit*.

    Missing or unparsable versions yield ``not assume_new``.
    """
    fallback = not assume_new
    if not version:
        return fallback
    try:
        return version_tuple(version) < version_tuple(limit)
    except ValueError:
        return fallback
4183
4184
def ytdl_is_updateable():
    """ Returns if youtube-dlc can be updated with -U """
    # Self-update is deliberately disabled in this fork: everything after this
    # unconditional return is upstream code kept for reference but unreachable.
    return False

    from zipimport import zipimporter

    return isinstance(globals().get('__loader__'), zipimporter) or hasattr(sys, 'frozen')
4192
4193
def args_to_str(args):
    """Get a short string representation for a subprocess command."""
    quoted = [compat_shlex_quote(a) for a in args]
    return ' '.join(quoted)
4197
4198
def error_to_compat_str(err):
    """Return the message of *err* as a text string, safe on both Pythons."""
    message = str(err)
    if sys.version_info[0] < 3:
        # On python 2 error byte string must be decoded with proper
        # encoding rather than ascii
        message = message.decode(preferredencoding())
    return message
4206
4207
def mimetype2ext(mt):
    """Map a MIME type to its conventional file extension (None passes through)."""
    if mt is None:
        return None

    # A few full MIME types that need an exact match
    full_type_map = {
        'audio/mp4': 'm4a',
        # Per RFC 3003, audio/mpeg can be .mp1, .mp2 or .mp3. Here use .mp3 as
        # it's the most popular one
        'audio/mpeg': 'mp3',
        'audio/x-wav': 'wav',
    }
    ext = full_type_map.get(mt)
    if ext is not None:
        return ext

    # Otherwise map only the subtype, with parameters stripped and lowercased
    _, _, subtype = mt.rpartition('/')
    subtype = subtype.split(';')[0].strip().lower()

    return {
        '3gpp': '3gp',
        'smptett+xml': 'tt',
        'ttaf+xml': 'dfxp',
        'ttml+xml': 'ttml',
        'x-flv': 'flv',
        'x-mp4-fragmented': 'mp4',
        'x-ms-sami': 'sami',
        'x-ms-wmv': 'wmv',
        'mpegurl': 'm3u8',
        'x-mpegurl': 'm3u8',
        'vnd.apple.mpegurl': 'm3u8',
        'dash+xml': 'mpd',
        'f4m+xml': 'f4m',
        'hds+xml': 'f4m',
        'vnd.ms-sstr+xml': 'ism',
        'quicktime': 'mov',
        'mp2t': 'ts',
        'x-wav': 'wav',
    }.get(subtype, subtype)
4245
4246
def parse_codecs(codecs_str):
    """Split an RFC 6381 ``codecs=`` string into vcodec/acodec entries."""
    # http://tools.ietf.org/html/rfc6381
    if not codecs_str:
        return {}
    entries = [
        c.strip()
        for c in codecs_str.strip().strip(',').split(',')
        if c.strip()]
    vcodec = acodec = None
    for full_codec in entries:
        short_name = full_codec.split('.')[0]
        if short_name in ('avc1', 'avc2', 'avc3', 'avc4', 'vp9', 'vp8', 'hev1', 'hev2', 'h263', 'h264', 'mp4v', 'hvc1', 'av01', 'theora'):
            if not vcodec:
                vcodec = full_codec
        elif short_name in ('mp4a', 'opus', 'vorbis', 'mp3', 'aac', 'ac-3', 'ec-3', 'eac3', 'dtsc', 'dtse', 'dtsh', 'dtsl'):
            if not acodec:
                acodec = full_codec
        else:
            write_string('WARNING: Unknown codec %s\n' % full_codec, sys.stderr)
    if not vcodec and not acodec:
        # Nothing recognized: a two-entry list is assumed to be video, audio
        if len(entries) == 2:
            return {
                'vcodec': entries[0],
                'acodec': entries[1],
            }
    else:
        return {
            'vcodec': vcodec or 'none',
            'acodec': acodec or 'none',
        }
    return {}
4276
4277
def urlhandle_detect_ext(url_handle):
    """Guess a file extension from a response's headers."""
    getheader = url_handle.headers.get

    # Prefer the filename advertised via Content-Disposition, when present
    cd = getheader('Content-Disposition')
    if cd:
        mobj = re.match(r'attachment;\s*filename="(?P<filename>[^"]+)"', cd)
        if mobj:
            ext = determine_ext(mobj.group('filename'), default_ext=None)
            if ext:
                return ext

    # Fall back to mapping the Content-Type
    return mimetype2ext(getheader('Content-Type'))
4290
4291
def encode_data_uri(data, mime_type):
    """Build a base64 ``data:`` URI for *data* bytes with the given MIME type."""
    payload = base64.b64encode(data).decode('ascii')
    return 'data:%s;base64,%s' % (mime_type, payload)
4294
4295
def age_restricted(content_limit, age_limit):
    """ Returns True iff the content should be blocked """
    if age_limit is None or content_limit is None:
        # No viewer limit set, or content available for everyone
        return False
    return age_limit < content_limit
4304
4305
def is_html(first_bytes):
    """ Detect whether a file contains HTML by examining its first bytes. """
    # Known byte-order marks and their encodings, longest first
    BOMS = [
        (b'\xef\xbb\xbf', 'utf-8'),
        (b'\x00\x00\xfe\xff', 'utf-32-be'),
        (b'\xff\xfe\x00\x00', 'utf-32-le'),
        (b'\xff\xfe', 'utf-16-le'),
        (b'\xfe\xff', 'utf-16-be'),
    ]
    decoded = None
    for bom, encoding in BOMS:
        if first_bytes.startswith(bom):
            decoded = first_bytes[len(bom):].decode(encoding, 'replace')
            break
    if decoded is None:
        decoded = first_bytes.decode('utf-8', 'replace')

    return re.match(r'^\s*<', decoded)
4324
4325
def determine_protocol(info_dict):
    """Work out the download protocol for *info_dict*, inferring from its URL."""
    protocol = info_dict.get('protocol')
    if protocol is not None:
        return protocol

    url = info_dict['url']
    # Streaming schemes are identified by their URL prefix
    for prefix in ('rtmp', 'mms', 'rtsp'):
        if url.startswith(prefix):
            return prefix

    # Playlist/manifest types are identified by file extension
    ext = determine_ext(url)
    if ext in ('m3u8', 'f4m'):
        return ext

    return compat_urllib_parse_urlparse(url).scheme
4346
4347
def render_table(header_row, data, delim=False, extraGap=0, hideEmpty=False):
    """ Render a list of rows, each as a list of values """

    def column_widths(table):
        return [max(len(compat_str(v)) for v in col) for col in zip(*table)]

    def keep_columns(row, mask):
        return [col for (take, col) in zip(mask, row) if take]

    if hideEmpty:
        # Drop columns whose data cells are all empty
        widths = column_widths(data)
        header_row = keep_columns(header_row, widths)
        data = [keep_columns(row, widths) for row in data]

    table = [header_row] + data
    widths = column_widths(table)
    if delim:
        # Insert a dashed separator line under the header
        table = [header_row] + [['-' * w for w in widths]] + data
    format_str = ' '.join('%-' + compat_str(w + extraGap) + 's' for w in widths[:-1]) + ' %s'
    return '\n'.join(format_str % tuple(row) for row in table)
4368
4369
def _match_one(filter_part, dct):
    """Evaluate a single ``--match-filter`` clause against *dct*.

    Supports comparison clauses (``key <op> value``, with an optional '?'
    making missing keys pass) and unary clauses (``key`` / ``!key``).
    Raises ValueError for clauses that do not parse.
    """
    COMPARISON_OPERATORS = {
        '<': operator.lt,
        '<=': operator.le,
        '>': operator.gt,
        '>=': operator.ge,
        '=': operator.eq,
        '!=': operator.ne,
    }
    operator_rex = re.compile(r'''(?x)\s*
        (?P<key>[a-z_]+)
        \s*(?P<op>%s)(?P<none_inclusive>\s*\?)?\s*
        (?:
            (?P<intval>[0-9.]+(?:[kKmMgGtTpPeEzZyY]i?[Bb]?)?)|
            (?P<quote>["\'])(?P<quotedstrval>(?:\\.|(?!(?P=quote)|\\).)+?)(?P=quote)|
            (?P<strval>(?![0-9.])[a-z0-9A-Z]*)
        )
        \s*$
        ''' % '|'.join(map(re.escape, COMPARISON_OPERATORS.keys())))
    m = operator_rex.search(filter_part)
    if m:
        op = COMPARISON_OPERATORS[m.group('op')]
        actual_value = dct.get(m.group('key'))
        if (m.group('quotedstrval') is not None
                or m.group('strval') is not None
                # If the original field is a string and matching comparisonvalue is
                # a number we should respect the origin of the original field
                # and process comparison value as a string (see
                # https://github.com/ytdl-org/youtube-dl/issues/11082).
                or actual_value is not None and m.group('intval') is not None
                and isinstance(actual_value, compat_str)):
            if m.group('op') not in ('=', '!='):
                raise ValueError(
                    'Operator %s does not support string values!' % m.group('op'))
            comparison_value = m.group('quotedstrval') or m.group('strval') or m.group('intval')
            quote = m.group('quote')
            if quote is not None:
                # Unescape quotes of the same kind inside the quoted value
                comparison_value = comparison_value.replace(r'\%s' % quote, quote)
        else:
            # Numeric comparison value; allow 100K / 3MiB style suffixes
            try:
                comparison_value = int(m.group('intval'))
            except ValueError:
                comparison_value = parse_filesize(m.group('intval'))
                if comparison_value is None:
                    comparison_value = parse_filesize(m.group('intval') + 'B')
                if comparison_value is None:
                    raise ValueError(
                        'Invalid integer value %r in filter part %r' % (
                            m.group('intval'), filter_part))
        if actual_value is None:
            # '?' after the operator means: missing key counts as a match
            return m.group('none_inclusive')
        return op(actual_value, comparison_value)

    UNARY_OPERATORS = {
        '': lambda v: (v is True) if isinstance(v, bool) else (v is not None),
        '!': lambda v: (v is False) if isinstance(v, bool) else (v is None),
    }
    operator_rex = re.compile(r'''(?x)\s*
        (?P<op>%s)\s*(?P<key>[a-z_]+)
        \s*$
        ''' % '|'.join(map(re.escape, UNARY_OPERATORS.keys())))
    m = operator_rex.search(filter_part)
    if m:
        op = UNARY_OPERATORS[m.group('op')]
        actual_value = dct.get(m.group('key'))
        return op(actual_value)

    raise ValueError('Invalid filter part %r' % filter_part)
4438
4439
def match_str(filter_str, dct):
    """ Filter a dictionary with a simple string syntax. Returns True (=passes filter) or false """
    # Every '&'-separated sub-clause must match
    return all(
        _match_one(filter_part, dct) for filter_part in filter_str.split('&'))
4445
4446
def match_filter_func(filter_str):
    """Build a match-filter callable: returns None to keep an entry, or a
    human-readable skip message when the entry fails *filter_str*."""
    def _match_func(info_dict):
        if match_str(filter_str, info_dict):
            return None
        video_title = info_dict.get('title', info_dict.get('id', 'video'))
        return '%s does not pass filter %s, skipping ..' % (video_title, filter_str)
    return _match_func
4455
4456
def parse_dfxp_time_expr(time_expr):
    """Parse a TTML/DFXP time expression into seconds; None when unparsable."""
    if not time_expr:
        return

    # Plain seconds, optionally suffixed with 's'
    mobj = re.match(r'^(?P<time_offset>\d+(?:\.\d+)?)s?$', time_expr)
    if mobj:
        return float(mobj.group('time_offset'))

    # HH:MM:SS with '.frac' or ':frames' tail (frames treated as a fraction)
    mobj = re.match(r'^(\d+):(\d\d):(\d\d(?:(?:\.|:)\d+)?)$', time_expr)
    if mobj:
        hours, minutes, seconds = mobj.groups()
        return 3600 * int(hours) + 60 * int(minutes) + float(seconds.replace(':', '.'))
4468
4469
def srt_subtitles_timecode(seconds):
    """Format a float second count as an SRT timecode (HH:MM:SS,mmm)."""
    hours = seconds / 3600
    minutes = (seconds % 3600) / 60
    secs = seconds % 60
    millis = (seconds % 1) * 1000
    return '%02d:%02d:%02d,%03d' % (hours, minutes, secs, millis)
4472
4473
def dfxp2srt(dfxp_data):
    '''
    @param dfxp_data A bytes-like object containing DFXP data
    @returns A unicode object containing converted SRT data
    '''
    # Legacy TTML namespaces rewritten (bytewise) to the current ones below
    LEGACY_NAMESPACES = (
        (b'http://www.w3.org/ns/ttml', [
            b'http://www.w3.org/2004/11/ttaf1',
            b'http://www.w3.org/2006/04/ttaf1',
            b'http://www.w3.org/2006/10/ttaf1',
        ]),
        (b'http://www.w3.org/ns/ttml#styling', [
            b'http://www.w3.org/ns/ttml#style',
        ]),
    )

    # Style attributes that are translated into SRT font/b/i/u markup
    SUPPORTED_STYLING = [
        'color',
        'fontFamily',
        'fontSize',
        'fontStyle',
        'fontWeight',
        'textDecoration'
    ]

    _x = functools.partial(xpath_with_ns, ns_map={
        'xml': 'http://www.w3.org/XML/1998/namespace',
        'ttml': 'http://www.w3.org/ns/ttml',
        'tts': 'http://www.w3.org/ns/ttml#styling',
    })

    styles = {}
    default_style = {}

    class TTMLPElementParser(object):
        # Accumulated SRT text for one <p> element, plus per-element stacks of
        # open markup tags and the styles currently in effect
        _out = ''
        _unclosed_elements = []
        _applied_styles = []

        def start(self, tag, attrib):
            if tag in (_x('ttml:br'), 'br'):
                self._out += '\n'
            else:
                unclosed_elements = []
                style = {}
                element_style_id = attrib.get('style')
                if default_style:
                    style.update(default_style)
                if element_style_id:
                    style.update(styles.get(element_style_id, {}))
                for prop in SUPPORTED_STYLING:
                    prop_val = attrib.get(_x('tts:' + prop))
                    if prop_val:
                        style[prop] = prop_val
                if style:
                    font = ''
                    for k, v in sorted(style.items()):
                        # Skip properties already active from the parent element
                        if self._applied_styles and self._applied_styles[-1].get(k) == v:
                            continue
                        if k == 'color':
                            font += ' color="%s"' % v
                        elif k == 'fontSize':
                            font += ' size="%s"' % v
                        elif k == 'fontFamily':
                            font += ' face="%s"' % v
                        elif k == 'fontWeight' and v == 'bold':
                            self._out += '<b>'
                            unclosed_elements.append('b')
                        elif k == 'fontStyle' and v == 'italic':
                            self._out += '<i>'
                            unclosed_elements.append('i')
                        elif k == 'textDecoration' and v == 'underline':
                            self._out += '<u>'
                            unclosed_elements.append('u')
                    if font:
                        self._out += '<font' + font + '>'
                        unclosed_elements.append('font')
                    applied_style = {}
                    if self._applied_styles:
                        applied_style.update(self._applied_styles[-1])
                    applied_style.update(style)
                    self._applied_styles.append(applied_style)
                self._unclosed_elements.append(unclosed_elements)

        def end(self, tag):
            if tag not in (_x('ttml:br'), 'br'):
                unclosed_elements = self._unclosed_elements.pop()
                for element in reversed(unclosed_elements):
                    self._out += '</%s>' % element
                if unclosed_elements and self._applied_styles:
                    self._applied_styles.pop()

        def data(self, data):
            self._out += data

        def close(self):
            return self._out.strip()

    def parse_node(node):
        # Convert one <p> subtree to SRT-flavoured text
        target = TTMLPElementParser()
        parser = xml.etree.ElementTree.XMLParser(target=target)
        parser.feed(xml.etree.ElementTree.tostring(node))
        return parser.close()

    # Normalize legacy namespaces before parsing
    for k, v in LEGACY_NAMESPACES:
        for ns in v:
            dfxp_data = dfxp_data.replace(ns, k)

    dfxp = compat_etree_fromstring(dfxp_data)
    out = []
    paras = dfxp.findall(_x('.//ttml:p')) or dfxp.findall('.//p')

    if not paras:
        raise ValueError('Invalid dfxp/TTML subtitle')

    # Resolve style inheritance; repeat while forward references remain
    repeat = False
    while True:
        for style in dfxp.findall(_x('.//ttml:style')):
            style_id = style.get('id') or style.get(_x('xml:id'))
            if not style_id:
                continue
            parent_style_id = style.get('style')
            if parent_style_id:
                if parent_style_id not in styles:
                    repeat = True
                    continue
                styles[style_id] = styles[parent_style_id].copy()
            for prop in SUPPORTED_STYLING:
                prop_val = style.get(_x('tts:' + prop))
                if prop_val:
                    styles.setdefault(style_id, {})[prop] = prop_val
        if repeat:
            repeat = False
        else:
            break

    # A style on <body> or <div> becomes the document default
    for p in ('body', 'div'):
        ele = xpath_element(dfxp, [_x('.//ttml:' + p), './/' + p])
        if ele is None:
            continue
        style = styles.get(ele.get('style'))
        if not style:
            continue
        default_style.update(style)

    for para, index in zip(paras, itertools.count(1)):
        begin_time = parse_dfxp_time_expr(para.attrib.get('begin'))
        end_time = parse_dfxp_time_expr(para.attrib.get('end'))
        dur = parse_dfxp_time_expr(para.attrib.get('dur'))
        if begin_time is None:
            continue
        if not end_time:
            if not dur:
                continue
            end_time = begin_time + dur
        out.append('%d\n%s --> %s\n%s\n\n' % (
            index,
            srt_subtitles_timecode(begin_time),
            srt_subtitles_timecode(end_time),
            parse_node(para)))

    return ''.join(out)
4636
4637
def cli_option(params, command_option, param):
    """Build ``[command_option, value]`` for a string-valued option, or []."""
    value = params.get(param)
    if value:
        value = compat_str(value)
    return [command_option, value] if value is not None else []
4643
4644
def cli_bool_option(params, command_option, param, true_value='true', false_value='false', separator=None):
    """Translate a boolean param into CLI arguments for an external program."""
    param = params.get(param)
    if param is None:
        return []
    assert isinstance(param, bool)
    value = true_value if param else false_value
    if separator:
        return [command_option + separator + value]
    return [command_option, value]
4653
4654
def cli_valueless_option(params, command_option, param, expected_value=True):
    """Emit ``[command_option]`` only when the param equals the expected value."""
    if params.get(param) == expected_value:
        return [command_option]
    return []
4658
4659
def cli_configuration_args(params, arg_name, key, default=[], exe=None):  # returns arg, for_compat
    """Resolve extra CLI args for an external program from user configuration.

    Lookup priority within the dict form: '<key>+<exe>', then '<exe>',
    then '<key>', then 'default'. The second return value indicates the
    legacy list/tuple form was used (backward compatibility).
    NOTE(review): *default* is a mutable default; it is only read here.
    """
    argdict = params.get(arg_name, {})
    if isinstance(argdict, (list, tuple)):  # for backward compatibility
        return argdict, True

    if argdict is None:
        return default, False
    assert isinstance(argdict, dict)

    assert isinstance(key, compat_str)
    key = key.lower()

    args = exe_args = None
    if exe is not None:
        assert isinstance(exe, compat_str)
        exe = exe.lower()
        # Most specific first: '<key>+<exe>' overrides everything
        args = argdict.get('%s+%s' % (key, exe))
        if args is None:
            exe_args = argdict.get(exe)

    if args is None:
        # '<key>' alone, unless the key IS the exe (already covered above)
        args = argdict.get(key) if key != exe else None
    if args is None and exe_args is None:
        args = argdict.get('default', default)

    args, exe_args = args or [], exe_args or []
    assert isinstance(args, (list, tuple))
    assert isinstance(exe_args, (list, tuple))
    return args + exe_args, False
4689
4690
class ISO639Utils(object):
    """Conversions between ISO 639-1 (2-letter) and ISO 639-2/T (3-letter)
    language codes."""
    # See http://www.loc.gov/standards/iso639-2/ISO-639-2_utf-8.txt
    _lang_map = {
        'aa': 'aar',
        'ab': 'abk',
        'ae': 'ave',
        'af': 'afr',
        'ak': 'aka',
        'am': 'amh',
        'an': 'arg',
        'ar': 'ara',
        'as': 'asm',
        'av': 'ava',
        'ay': 'aym',
        'az': 'aze',
        'ba': 'bak',
        'be': 'bel',
        'bg': 'bul',
        'bh': 'bih',
        'bi': 'bis',
        'bm': 'bam',
        'bn': 'ben',
        'bo': 'bod',
        'br': 'bre',
        'bs': 'bos',
        'ca': 'cat',
        'ce': 'che',
        'ch': 'cha',
        'co': 'cos',
        'cr': 'cre',
        'cs': 'ces',
        'cu': 'chu',
        'cv': 'chv',
        'cy': 'cym',
        'da': 'dan',
        'de': 'deu',
        'dv': 'div',
        'dz': 'dzo',
        'ee': 'ewe',
        'el': 'ell',
        'en': 'eng',
        'eo': 'epo',
        'es': 'spa',
        'et': 'est',
        'eu': 'eus',
        'fa': 'fas',
        'ff': 'ful',
        'fi': 'fin',
        'fj': 'fij',
        'fo': 'fao',
        'fr': 'fra',
        'fy': 'fry',
        'ga': 'gle',
        'gd': 'gla',
        'gl': 'glg',
        'gn': 'grn',
        'gu': 'guj',
        'gv': 'glv',
        'ha': 'hau',
        'he': 'heb',
        'iw': 'heb',  # Replaced by he in 1989 revision
        'hi': 'hin',
        'ho': 'hmo',
        'hr': 'hrv',
        'ht': 'hat',
        'hu': 'hun',
        'hy': 'hye',
        'hz': 'her',
        'ia': 'ina',
        'id': 'ind',
        'in': 'ind',  # Replaced by id in 1989 revision
        'ie': 'ile',
        'ig': 'ibo',
        'ii': 'iii',
        'ik': 'ipk',
        'io': 'ido',
        'is': 'isl',
        'it': 'ita',
        'iu': 'iku',
        'ja': 'jpn',
        'jv': 'jav',
        'ka': 'kat',
        'kg': 'kon',
        'ki': 'kik',
        'kj': 'kua',
        'kk': 'kaz',
        'kl': 'kal',
        'km': 'khm',
        'kn': 'kan',
        'ko': 'kor',
        'kr': 'kau',
        'ks': 'kas',
        'ku': 'kur',
        'kv': 'kom',
        'kw': 'cor',
        'ky': 'kir',
        'la': 'lat',
        'lb': 'ltz',
        'lg': 'lug',
        'li': 'lim',
        'ln': 'lin',
        'lo': 'lao',
        'lt': 'lit',
        'lu': 'lub',
        'lv': 'lav',
        'mg': 'mlg',
        'mh': 'mah',
        'mi': 'mri',
        'mk': 'mkd',
        'ml': 'mal',
        'mn': 'mon',
        'mr': 'mar',
        'ms': 'msa',
        'mt': 'mlt',
        'my': 'mya',
        'na': 'nau',
        'nb': 'nob',
        'nd': 'nde',
        'ne': 'nep',
        'ng': 'ndo',
        'nl': 'nld',
        'nn': 'nno',
        'no': 'nor',
        'nr': 'nbl',
        'nv': 'nav',
        'ny': 'nya',
        'oc': 'oci',
        'oj': 'oji',
        'om': 'orm',
        'or': 'ori',
        'os': 'oss',
        'pa': 'pan',
        'pi': 'pli',
        'pl': 'pol',
        'ps': 'pus',
        'pt': 'por',
        'qu': 'que',
        'rm': 'roh',
        'rn': 'run',
        'ro': 'ron',
        'ru': 'rus',
        'rw': 'kin',
        'sa': 'san',
        'sc': 'srd',
        'sd': 'snd',
        'se': 'sme',
        'sg': 'sag',
        'si': 'sin',
        'sk': 'slk',
        'sl': 'slv',
        'sm': 'smo',
        'sn': 'sna',
        'so': 'som',
        'sq': 'sqi',
        'sr': 'srp',
        'ss': 'ssw',
        'st': 'sot',
        'su': 'sun',
        'sv': 'swe',
        'sw': 'swa',
        'ta': 'tam',
        'te': 'tel',
        'tg': 'tgk',
        'th': 'tha',
        'ti': 'tir',
        'tk': 'tuk',
        'tl': 'tgl',
        'tn': 'tsn',
        'to': 'ton',
        'tr': 'tur',
        'ts': 'tso',
        'tt': 'tat',
        'tw': 'twi',
        'ty': 'tah',
        'ug': 'uig',
        'uk': 'ukr',
        'ur': 'urd',
        'uz': 'uzb',
        've': 'ven',
        'vi': 'vie',
        'vo': 'vol',
        'wa': 'wln',
        'wo': 'wol',
        'xh': 'xho',
        'yi': 'yid',
        'ji': 'yid',  # Replaced by yi in 1989 revision
        'yo': 'yor',
        'za': 'zha',
        'zh': 'zho',
        'zu': 'zul',
    }

    # Lazily-built reverse mapping (639-2/T -> 639-1).  First match wins so
    # that the current 639-1 codes ('he', 'id', 'yi') take precedence over
    # their pre-1989 synonyms ('iw', 'in', 'ji'), matching the behavior of
    # iterating the insertion-ordered map above.
    _lang_map_reversed = None

    @classmethod
    def short2long(cls, code):
        """Convert language code from ISO 639-1 to ISO 639-2/T"""
        # Only the first two characters matter, so 'en-US' resolves like 'en'.
        return cls._lang_map.get(code[:2])

    @classmethod
    def long2short(cls, code):
        """Convert language code from ISO 639-2/T to ISO 639-1"""
        # Build the reverse lookup once instead of scanning the whole map on
        # every call.
        if cls._lang_map_reversed is None:
            reversed_map = {}
            for short_name, long_name in cls._lang_map.items():
                reversed_map.setdefault(long_name, short_name)
            cls._lang_map_reversed = reversed_map
        return cls._lang_map_reversed.get(code)
4894
4895
class ISO3166Utils(object):
    """Lookup of full English country names from ISO 3166-1 alpha-2 codes."""
    # From http://data.okfn.org/data/core/country-list
    _country_map = {
        'AF': 'Afghanistan',
        'AX': 'Åland Islands',
        'AL': 'Albania',
        'DZ': 'Algeria',
        'AS': 'American Samoa',
        'AD': 'Andorra',
        'AO': 'Angola',
        'AI': 'Anguilla',
        'AQ': 'Antarctica',
        'AG': 'Antigua and Barbuda',
        'AR': 'Argentina',
        'AM': 'Armenia',
        'AW': 'Aruba',
        'AU': 'Australia',
        'AT': 'Austria',
        'AZ': 'Azerbaijan',
        'BS': 'Bahamas',
        'BH': 'Bahrain',
        'BD': 'Bangladesh',
        'BB': 'Barbados',
        'BY': 'Belarus',
        'BE': 'Belgium',
        'BZ': 'Belize',
        'BJ': 'Benin',
        'BM': 'Bermuda',
        'BT': 'Bhutan',
        'BO': 'Bolivia, Plurinational State of',
        'BQ': 'Bonaire, Sint Eustatius and Saba',
        'BA': 'Bosnia and Herzegovina',
        'BW': 'Botswana',
        'BV': 'Bouvet Island',
        'BR': 'Brazil',
        'IO': 'British Indian Ocean Territory',
        'BN': 'Brunei Darussalam',
        'BG': 'Bulgaria',
        'BF': 'Burkina Faso',
        'BI': 'Burundi',
        'KH': 'Cambodia',
        'CM': 'Cameroon',
        'CA': 'Canada',
        'CV': 'Cape Verde',
        'KY': 'Cayman Islands',
        'CF': 'Central African Republic',
        'TD': 'Chad',
        'CL': 'Chile',
        'CN': 'China',
        'CX': 'Christmas Island',
        'CC': 'Cocos (Keeling) Islands',
        'CO': 'Colombia',
        'KM': 'Comoros',
        'CG': 'Congo',
        'CD': 'Congo, the Democratic Republic of the',
        'CK': 'Cook Islands',
        'CR': 'Costa Rica',
        'CI': 'Côte d\'Ivoire',
        'HR': 'Croatia',
        'CU': 'Cuba',
        'CW': 'Curaçao',
        'CY': 'Cyprus',
        'CZ': 'Czech Republic',
        'DK': 'Denmark',
        'DJ': 'Djibouti',
        'DM': 'Dominica',
        'DO': 'Dominican Republic',
        'EC': 'Ecuador',
        'EG': 'Egypt',
        'SV': 'El Salvador',
        'GQ': 'Equatorial Guinea',
        'ER': 'Eritrea',
        'EE': 'Estonia',
        'ET': 'Ethiopia',
        'FK': 'Falkland Islands (Malvinas)',
        'FO': 'Faroe Islands',
        'FJ': 'Fiji',
        'FI': 'Finland',
        'FR': 'France',
        'GF': 'French Guiana',
        'PF': 'French Polynesia',
        'TF': 'French Southern Territories',
        'GA': 'Gabon',
        'GM': 'Gambia',
        'GE': 'Georgia',
        'DE': 'Germany',
        'GH': 'Ghana',
        'GI': 'Gibraltar',
        'GR': 'Greece',
        'GL': 'Greenland',
        'GD': 'Grenada',
        'GP': 'Guadeloupe',
        'GU': 'Guam',
        'GT': 'Guatemala',
        'GG': 'Guernsey',
        'GN': 'Guinea',
        'GW': 'Guinea-Bissau',
        'GY': 'Guyana',
        'HT': 'Haiti',
        'HM': 'Heard Island and McDonald Islands',
        'VA': 'Holy See (Vatican City State)',
        'HN': 'Honduras',
        'HK': 'Hong Kong',
        'HU': 'Hungary',
        'IS': 'Iceland',
        'IN': 'India',
        'ID': 'Indonesia',
        'IR': 'Iran, Islamic Republic of',
        'IQ': 'Iraq',
        'IE': 'Ireland',
        'IM': 'Isle of Man',
        'IL': 'Israel',
        'IT': 'Italy',
        'JM': 'Jamaica',
        'JP': 'Japan',
        'JE': 'Jersey',
        'JO': 'Jordan',
        'KZ': 'Kazakhstan',
        'KE': 'Kenya',
        'KI': 'Kiribati',
        'KP': 'Korea, Democratic People\'s Republic of',
        'KR': 'Korea, Republic of',
        'KW': 'Kuwait',
        'KG': 'Kyrgyzstan',
        'LA': 'Lao People\'s Democratic Republic',
        'LV': 'Latvia',
        'LB': 'Lebanon',
        'LS': 'Lesotho',
        'LR': 'Liberia',
        'LY': 'Libya',
        'LI': 'Liechtenstein',
        'LT': 'Lithuania',
        'LU': 'Luxembourg',
        'MO': 'Macao',
        'MK': 'Macedonia, the Former Yugoslav Republic of',
        'MG': 'Madagascar',
        'MW': 'Malawi',
        'MY': 'Malaysia',
        'MV': 'Maldives',
        'ML': 'Mali',
        'MT': 'Malta',
        'MH': 'Marshall Islands',
        'MQ': 'Martinique',
        'MR': 'Mauritania',
        'MU': 'Mauritius',
        'YT': 'Mayotte',
        'MX': 'Mexico',
        'FM': 'Micronesia, Federated States of',
        'MD': 'Moldova, Republic of',
        'MC': 'Monaco',
        'MN': 'Mongolia',
        'ME': 'Montenegro',
        'MS': 'Montserrat',
        'MA': 'Morocco',
        'MZ': 'Mozambique',
        'MM': 'Myanmar',
        'NA': 'Namibia',
        'NR': 'Nauru',
        'NP': 'Nepal',
        'NL': 'Netherlands',
        'NC': 'New Caledonia',
        'NZ': 'New Zealand',
        'NI': 'Nicaragua',
        'NE': 'Niger',
        'NG': 'Nigeria',
        'NU': 'Niue',
        'NF': 'Norfolk Island',
        'MP': 'Northern Mariana Islands',
        'NO': 'Norway',
        'OM': 'Oman',
        'PK': 'Pakistan',
        'PW': 'Palau',
        'PS': 'Palestine, State of',
        'PA': 'Panama',
        'PG': 'Papua New Guinea',
        'PY': 'Paraguay',
        'PE': 'Peru',
        'PH': 'Philippines',
        'PN': 'Pitcairn',
        'PL': 'Poland',
        'PT': 'Portugal',
        'PR': 'Puerto Rico',
        'QA': 'Qatar',
        'RE': 'Réunion',
        'RO': 'Romania',
        'RU': 'Russian Federation',
        'RW': 'Rwanda',
        'BL': 'Saint Barthélemy',
        'SH': 'Saint Helena, Ascension and Tristan da Cunha',
        'KN': 'Saint Kitts and Nevis',
        'LC': 'Saint Lucia',
        'MF': 'Saint Martin (French part)',
        'PM': 'Saint Pierre and Miquelon',
        'VC': 'Saint Vincent and the Grenadines',
        'WS': 'Samoa',
        'SM': 'San Marino',
        'ST': 'Sao Tome and Principe',
        'SA': 'Saudi Arabia',
        'SN': 'Senegal',
        'RS': 'Serbia',
        'SC': 'Seychelles',
        'SL': 'Sierra Leone',
        'SG': 'Singapore',
        'SX': 'Sint Maarten (Dutch part)',
        'SK': 'Slovakia',
        'SI': 'Slovenia',
        'SB': 'Solomon Islands',
        'SO': 'Somalia',
        'ZA': 'South Africa',
        'GS': 'South Georgia and the South Sandwich Islands',
        'SS': 'South Sudan',
        'ES': 'Spain',
        'LK': 'Sri Lanka',
        'SD': 'Sudan',
        'SR': 'Suriname',
        'SJ': 'Svalbard and Jan Mayen',
        'SZ': 'Swaziland',
        'SE': 'Sweden',
        'CH': 'Switzerland',
        'SY': 'Syrian Arab Republic',
        'TW': 'Taiwan, Province of China',
        'TJ': 'Tajikistan',
        'TZ': 'Tanzania, United Republic of',
        'TH': 'Thailand',
        'TL': 'Timor-Leste',
        'TG': 'Togo',
        'TK': 'Tokelau',
        'TO': 'Tonga',
        'TT': 'Trinidad and Tobago',
        'TN': 'Tunisia',
        'TR': 'Turkey',
        'TM': 'Turkmenistan',
        'TC': 'Turks and Caicos Islands',
        'TV': 'Tuvalu',
        'UG': 'Uganda',
        'UA': 'Ukraine',
        'AE': 'United Arab Emirates',
        'GB': 'United Kingdom',
        'US': 'United States',
        'UM': 'United States Minor Outlying Islands',
        'UY': 'Uruguay',
        'UZ': 'Uzbekistan',
        'VU': 'Vanuatu',
        'VE': 'Venezuela, Bolivarian Republic of',
        'VN': 'Viet Nam',
        'VG': 'Virgin Islands, British',
        'VI': 'Virgin Islands, U.S.',
        'WF': 'Wallis and Futuna',
        'EH': 'Western Sahara',
        'YE': 'Yemen',
        'ZM': 'Zambia',
        'ZW': 'Zimbabwe',
    }

    @classmethod
    def short2full(cls, code):
        """Convert an ISO 3166-1 alpha-2 country code to the corresponding
        full name (case-insensitively); returns None for unknown codes."""
        normalized = code.upper()
        return cls._country_map.get(normalized)
5154
5155
class GeoUtils(object):
    """Generate random IPv4 addresses inside per-country address blocks
    (used for geo-restriction bypass)."""
    # Major IPv4 address blocks per country
    _country_ip_map = {
        'AD': '46.172.224.0/19',
        'AE': '94.200.0.0/13',
        'AF': '149.54.0.0/17',
        'AG': '209.59.64.0/18',
        'AI': '204.14.248.0/21',
        'AL': '46.99.0.0/16',
        'AM': '46.70.0.0/15',
        'AO': '105.168.0.0/13',
        'AP': '182.50.184.0/21',
        'AQ': '23.154.160.0/24',
        'AR': '181.0.0.0/12',
        'AS': '202.70.112.0/20',
        'AT': '77.116.0.0/14',
        'AU': '1.128.0.0/11',
        'AW': '181.41.0.0/18',
        'AX': '185.217.4.0/22',
        'AZ': '5.197.0.0/16',
        'BA': '31.176.128.0/17',
        'BB': '65.48.128.0/17',
        'BD': '114.130.0.0/16',
        'BE': '57.0.0.0/8',
        'BF': '102.178.0.0/15',
        'BG': '95.42.0.0/15',
        'BH': '37.131.0.0/17',
        'BI': '154.117.192.0/18',
        'BJ': '137.255.0.0/16',
        'BL': '185.212.72.0/23',
        'BM': '196.12.64.0/18',
        'BN': '156.31.0.0/16',
        'BO': '161.56.0.0/16',
        'BQ': '161.0.80.0/20',
        'BR': '191.128.0.0/12',
        'BS': '24.51.64.0/18',
        'BT': '119.2.96.0/19',
        'BW': '168.167.0.0/16',
        'BY': '178.120.0.0/13',
        'BZ': '179.42.192.0/18',
        'CA': '99.224.0.0/11',
        'CD': '41.243.0.0/16',
        'CF': '197.242.176.0/21',
        'CG': '160.113.0.0/16',
        'CH': '85.0.0.0/13',
        'CI': '102.136.0.0/14',
        'CK': '202.65.32.0/19',
        'CL': '152.172.0.0/14',
        'CM': '102.244.0.0/14',
        'CN': '36.128.0.0/10',
        'CO': '181.240.0.0/12',
        'CR': '201.192.0.0/12',
        'CU': '152.206.0.0/15',
        'CV': '165.90.96.0/19',
        'CW': '190.88.128.0/17',
        'CY': '31.153.0.0/16',
        'CZ': '88.100.0.0/14',
        'DE': '53.0.0.0/8',
        'DJ': '197.241.0.0/17',
        'DK': '87.48.0.0/12',
        'DM': '192.243.48.0/20',
        'DO': '152.166.0.0/15',
        'DZ': '41.96.0.0/12',
        'EC': '186.68.0.0/15',
        'EE': '90.190.0.0/15',
        'EG': '156.160.0.0/11',
        'ER': '196.200.96.0/20',
        'ES': '88.0.0.0/11',
        'ET': '196.188.0.0/14',
        'EU': '2.16.0.0/13',
        'FI': '91.152.0.0/13',
        'FJ': '144.120.0.0/16',
        'FK': '80.73.208.0/21',
        'FM': '119.252.112.0/20',
        'FO': '88.85.32.0/19',
        'FR': '90.0.0.0/9',
        'GA': '41.158.0.0/15',
        'GB': '25.0.0.0/8',
        'GD': '74.122.88.0/21',
        'GE': '31.146.0.0/16',
        'GF': '161.22.64.0/18',
        'GG': '62.68.160.0/19',
        'GH': '154.160.0.0/12',
        'GI': '95.164.0.0/16',
        'GL': '88.83.0.0/19',
        'GM': '160.182.0.0/15',
        'GN': '197.149.192.0/18',
        'GP': '104.250.0.0/19',
        'GQ': '105.235.224.0/20',
        'GR': '94.64.0.0/13',
        'GT': '168.234.0.0/16',
        'GU': '168.123.0.0/16',
        'GW': '197.214.80.0/20',
        'GY': '181.41.64.0/18',
        'HK': '113.252.0.0/14',
        'HN': '181.210.0.0/16',
        'HR': '93.136.0.0/13',
        'HT': '148.102.128.0/17',
        'HU': '84.0.0.0/14',
        'ID': '39.192.0.0/10',
        'IE': '87.32.0.0/12',
        'IL': '79.176.0.0/13',
        'IM': '5.62.80.0/20',
        'IN': '117.192.0.0/10',
        'IO': '203.83.48.0/21',
        'IQ': '37.236.0.0/14',
        'IR': '2.176.0.0/12',
        'IS': '82.221.0.0/16',
        'IT': '79.0.0.0/10',
        'JE': '87.244.64.0/18',
        'JM': '72.27.0.0/17',
        'JO': '176.29.0.0/16',
        'JP': '133.0.0.0/8',
        'KE': '105.48.0.0/12',
        'KG': '158.181.128.0/17',
        'KH': '36.37.128.0/17',
        'KI': '103.25.140.0/22',
        'KM': '197.255.224.0/20',
        'KN': '198.167.192.0/19',
        'KP': '175.45.176.0/22',
        'KR': '175.192.0.0/10',
        'KW': '37.36.0.0/14',
        'KY': '64.96.0.0/15',
        'KZ': '2.72.0.0/13',
        'LA': '115.84.64.0/18',
        'LB': '178.135.0.0/16',
        'LC': '24.92.144.0/20',
        'LI': '82.117.0.0/19',
        'LK': '112.134.0.0/15',
        'LR': '102.183.0.0/16',
        'LS': '129.232.0.0/17',
        'LT': '78.56.0.0/13',
        'LU': '188.42.0.0/16',
        'LV': '46.109.0.0/16',
        'LY': '41.252.0.0/14',
        'MA': '105.128.0.0/11',
        'MC': '88.209.64.0/18',
        'MD': '37.246.0.0/16',
        'ME': '178.175.0.0/17',
        'MF': '74.112.232.0/21',
        'MG': '154.126.0.0/17',
        'MH': '117.103.88.0/21',
        'MK': '77.28.0.0/15',
        'ML': '154.118.128.0/18',
        'MM': '37.111.0.0/17',
        'MN': '49.0.128.0/17',
        'MO': '60.246.0.0/16',
        'MP': '202.88.64.0/20',
        'MQ': '109.203.224.0/19',
        'MR': '41.188.64.0/18',
        'MS': '208.90.112.0/22',
        'MT': '46.11.0.0/16',
        'MU': '105.16.0.0/12',
        'MV': '27.114.128.0/18',
        'MW': '102.70.0.0/15',
        'MX': '187.192.0.0/11',
        'MY': '175.136.0.0/13',
        'MZ': '197.218.0.0/15',
        'NA': '41.182.0.0/16',
        'NC': '101.101.0.0/18',
        'NE': '197.214.0.0/18',
        'NF': '203.17.240.0/22',
        'NG': '105.112.0.0/12',
        'NI': '186.76.0.0/15',
        'NL': '145.96.0.0/11',
        'NO': '84.208.0.0/13',
        'NP': '36.252.0.0/15',
        'NR': '203.98.224.0/19',
        'NU': '49.156.48.0/22',
        'NZ': '49.224.0.0/14',
        'OM': '5.36.0.0/15',
        'PA': '186.72.0.0/15',
        'PE': '186.160.0.0/14',
        'PF': '123.50.64.0/18',
        'PG': '124.240.192.0/19',
        'PH': '49.144.0.0/13',
        'PK': '39.32.0.0/11',
        'PL': '83.0.0.0/11',
        'PM': '70.36.0.0/20',
        'PR': '66.50.0.0/16',
        'PS': '188.161.0.0/16',
        'PT': '85.240.0.0/13',
        'PW': '202.124.224.0/20',
        'PY': '181.120.0.0/14',
        'QA': '37.210.0.0/15',
        'RE': '102.35.0.0/16',
        'RO': '79.112.0.0/13',
        'RS': '93.86.0.0/15',
        'RU': '5.136.0.0/13',
        'RW': '41.186.0.0/16',
        'SA': '188.48.0.0/13',
        'SB': '202.1.160.0/19',
        'SC': '154.192.0.0/11',
        'SD': '102.120.0.0/13',
        'SE': '78.64.0.0/12',
        'SG': '8.128.0.0/10',
        'SI': '188.196.0.0/14',
        'SK': '78.98.0.0/15',
        'SL': '102.143.0.0/17',
        'SM': '89.186.32.0/19',
        'SN': '41.82.0.0/15',
        'SO': '154.115.192.0/18',
        'SR': '186.179.128.0/17',
        'SS': '105.235.208.0/21',
        'ST': '197.159.160.0/19',
        'SV': '168.243.0.0/16',
        'SX': '190.102.0.0/20',
        'SY': '5.0.0.0/16',
        'SZ': '41.84.224.0/19',
        'TC': '65.255.48.0/20',
        'TD': '154.68.128.0/19',
        'TG': '196.168.0.0/14',
        'TH': '171.96.0.0/13',
        'TJ': '85.9.128.0/18',
        'TK': '27.96.24.0/21',
        'TL': '180.189.160.0/20',
        'TM': '95.85.96.0/19',
        'TN': '197.0.0.0/11',
        'TO': '175.176.144.0/21',
        'TR': '78.160.0.0/11',
        'TT': '186.44.0.0/15',
        'TV': '202.2.96.0/19',
        'TW': '120.96.0.0/11',
        'TZ': '156.156.0.0/14',
        'UA': '37.52.0.0/14',
        'UG': '102.80.0.0/13',
        'US': '6.0.0.0/8',
        'UY': '167.56.0.0/13',
        'UZ': '84.54.64.0/18',
        'VA': '212.77.0.0/19',
        'VC': '207.191.240.0/21',
        'VE': '186.88.0.0/13',
        'VG': '66.81.192.0/20',
        'VI': '146.226.0.0/16',
        'VN': '14.160.0.0/11',
        'VU': '202.80.32.0/20',
        'WF': '117.20.32.0/21',
        'WS': '202.4.32.0/19',
        'YE': '134.35.0.0/16',
        'YT': '41.242.116.0/22',
        'ZA': '41.0.0.0/11',
        'ZM': '102.144.0.0/13',
        'ZW': '102.177.192.0/18',
    }

    @classmethod
    def random_ipv4(cls, code_or_block):
        """Return a random IPv4 address (as a string) inside the given CIDR
        block, or inside the mapped block for a 2-letter country code.
        Returns None for an unknown country code."""
        # A two-character argument is treated as a country code, anything
        # else as a literal CIDR block.
        if len(code_or_block) == 2:
            block = cls._country_ip_map.get(code_or_block.upper())
            if not block:
                return None
        else:
            block = code_or_block
        network, prefix_len = block.split('/')
        lo = compat_struct_unpack('!L', socket.inet_aton(network))[0]
        # Highest address: fill all host bits below the prefix with ones.
        hi = lo | (0xffffffff >> int(prefix_len))
        picked = random.randint(lo, hi)
        return compat_str(socket.inet_ntoa(compat_struct_pack('!L', picked)))
5414
5415
class PerRequestProxyHandler(compat_urllib_request.ProxyHandler):
    """ProxyHandler variant that lets each request override the proxy via the
    'Ytdl-request-proxy' header and defers SOCKS proxies to the HTTP(S)
    handlers via the 'Ytdl-socks-proxy' header."""

    def __init__(self, proxies=None):
        # Set default handlers
        # The lambda binds proxy/type/meth as default arguments on purpose:
        # default values are evaluated now, avoiding the late-binding closure
        # pitfall where every handler would see the last loop value of `type`.
        for type in ('http', 'https'):
            setattr(self, '%s_open' % type,
                    lambda r, proxy='__noproxy__', type=type, meth=self.proxy_open:
                        meth(r, proxy, type))
        compat_urllib_request.ProxyHandler.__init__(self, proxies)

    def proxy_open(self, req, proxy, type):
        # Per-request override: consume (and remove) the internal header.
        req_proxy = req.headers.get('Ytdl-request-proxy')
        if req_proxy is not None:
            proxy = req_proxy
            del req.headers['Ytdl-request-proxy']

        if proxy == '__noproxy__':
            return None  # No Proxy
        if compat_urlparse.urlparse(proxy).scheme.lower() in ('socks', 'socks4', 'socks4a', 'socks5'):
            req.add_header('Ytdl-socks-proxy', proxy)
            # youtube-dlc's http/https handlers wrap the socket with SOCKS
            # themselves, so no further handling is needed here.
            return None
        return compat_urllib_request.ProxyHandler.proxy_open(
            self, req, proxy, type)
5439
5440
5441 # Both long_to_bytes and bytes_to_long are adapted from PyCrypto, which is
5442 # released into Public Domain
5443 # https://github.com/dlitz/pycrypto/blob/master/lib/Crypto/Util/number.py#L387
5444
def long_to_bytes(n, blocksize=0):
    """long_to_bytes(n:long, blocksize:int) : string
    Convert a long integer to a byte string.

    If optional blocksize is given and greater than zero, pad the front of the
    byte string with binary zeros so that the length is a multiple of
    blocksize.
    """
    # Adapted from PyCrypto (public domain); emit 32-bit big-endian words.
    packed = b''
    value = int(n)
    while value > 0:
        packed = compat_struct_pack('>I', value & 0xffffffff) + packed
        value >>= 32
    # Strip leading zero bytes introduced by whole-word packing.
    idx = 0
    for idx in range(len(packed)):
        if packed[idx] != b'\000'[0]:
            break
    else:
        # Only reached when n == 0 (nothing was packed).
        packed = b'\000'
        idx = 0
    packed = packed[idx:]
    # Front-pad with zeros up to a multiple of blocksize, if requested.
    if blocksize > 0 and len(packed) % blocksize:
        packed = (blocksize - len(packed) % blocksize) * b'\000' + packed
    return packed
5473
5474
def bytes_to_long(s):
    """bytes_to_long(string) : long
    Convert a byte string to a long integer.

    This is (essentially) the inverse of long_to_bytes().
    """
    # Adapted from PyCrypto (public domain): front-pad to a whole number of
    # 32-bit words, then fold them in big-endian order.
    total = len(s)
    remainder = total % 4
    if remainder:
        pad = 4 - remainder
        s = b'\000' * pad + s
        total += pad
    acc = 0
    for offset in range(0, total, 4):
        acc = (acc << 32) + compat_struct_unpack('>I', s[offset:offset + 4])[0]
    return acc
5490
5491
def ohdave_rsa_encrypt(data, exponent, modulus):
    '''
    Implement OHDave's RSA algorithm. See http://www.ohdave.com/rsa/

    Input:
        data: data to encrypt, bytes-like object
        exponent, modulus: parameter e and N of RSA algorithm, both integer
    Output: hex string of encrypted data

    Limitation: supports one block encryption only
    '''
    # Interpret the bytes little-endian (hence the [::-1] reversal) as one
    # big integer, then apply textbook RSA: payload^e mod N.
    payload = int(binascii.hexlify(data[::-1]), 16)
    return '%x' % pow(payload, exponent, modulus)
5507
5508
def pkcs1pad(data, length):
    """
    Padding input data with PKCS#1 scheme

    @param {int[]} data input data
    @param {int} length target length
    @returns {int[]} padded data
    @raises ValueError when data cannot fit in length with minimum padding
    """
    if len(data) > length - 11:
        raise ValueError('Input data too long for PKCS#1 padding')

    # RFC 8017 (EME-PKCS1-v1_5): the padding string PS must consist of
    # *nonzero* octets, because the first zero octet marks the end of PS.
    # The previous randint(0, 254) could emit zeros (truncating the message
    # on decryption) and never produced 255; the valid range is 1..255.
    pseudo_random = [random.randint(1, 255) for _ in range(length - len(data) - 3)]
    return [0, 2] + pseudo_random + [0] + data
5522
5523
def encode_base_n(num, n, table=None):
    """Encode a non-negative integer num in base n.

    Digits come from table (defaults to 0-9a-zA-Z truncated to n symbols);
    raises ValueError if the table is too short for the base.
    """
    FULL_TABLE = '0123456789abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ'
    if not table:
        table = FULL_TABLE[:n]

    if n > len(table):
        raise ValueError('base %d exceeds table length %d' % (n, len(table)))

    if num == 0:
        return table[0]

    # Collect least-significant digits first, then reverse.
    digits = []
    while num:
        digits.append(table[num % n])
        num //= n
    return ''.join(reversed(digits))
5540
5541
def decode_packed_codes(code):
    """Decode JavaScript obfuscated with Dean Edwards' 'packer'.

    Extracts the packed payload via PACKED_CODES_RE, rebuilds the
    symbol table (base-n index -> word) and substitutes every word
    token back into the payload.
    """
    mobj = re.search(PACKED_CODES_RE, code)
    obfuscated_code, base, count, symbols = mobj.groups()
    base = int(base)
    count = int(count)
    words = symbols.split('|')

    symbol_table = {}
    for idx in range(count - 1, -1, -1):
        # An empty replacement word means the token stands for itself.
        token = encode_base_n(idx, base)
        symbol_table[token] = words[idx] or token

    return re.sub(
        r'\b(\w+)\b', lambda m: symbol_table[m.group(0)],
        obfuscated_code)
5558
5559
def caesar(s, alphabet, shift):
    """Caesar-shift every character of s that occurs in alphabet by shift
    positions (wrapping); characters outside alphabet pass through."""
    if shift == 0:
        return s
    size = len(alphabet)

    def rotate(ch):
        if ch not in alphabet:
            return ch
        return alphabet[(alphabet.index(ch) + shift) % size]

    return ''.join(rotate(ch) for ch in s)
5567
5568
def rot47(s):
    # ROT47: Caesar-shift the 94 printable ASCII characters ('!' .. '~')
    # by 47 positions. Applying rot47 twice restores the original string.
    return caesar(s, r'''!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~''', 47)
5571
5572
def parse_m3u8_attributes(attrib):
    """Parse an M3U8 attribute list ('KEY=value,KEY="quoted,value",...')
    into a dict, stripping surrounding double quotes from quoted values."""
    attributes = {}
    for key, val in re.findall(r'(?P<key>[A-Z0-9-]+)=(?P<val>"[^"]+"|[^",]+)(?:,|$)', attrib):
        attributes[key] = val[1:-1] if val.startswith('"') else val
    return attributes
5580
5581
def urshift(val, n):
    """Unsigned 32-bit right shift (the JavaScript `>>>` operator):
    negative values are reinterpreted as their unsigned 32-bit form first."""
    if val >= 0:
        return val >> n
    return (val + 0x100000000) >> n
5584
5585
5586 # Based on png2str() written by @gdkchan and improved by @yokrysty
5587 # Originally posted at https://github.com/ytdl-org/youtube-dl/issues/9706
def decode_png(png_data):
    """Decode a PNG byte string into (width, height, pixels).

    pixels is a list of rows; each row is a flat list of channel byte values.
    The code assumes 3 bytes per pixel (stride = width * 3), i.e. 8-bit RGB
    without alpha, and non-interlaced data — TODO confirm callers only feed
    such images. Raises IOError for non-PNG input or missing IDAT data.
    """
    # Reference: https://www.w3.org/TR/PNG/
    header = png_data[8:]

    # A PNG starts with an 8-byte signature immediately followed by IHDR.
    if png_data[:8] != b'\x89PNG\x0d\x0a\x1a\x0a' or header[4:8] != b'IHDR':
        raise IOError('Not a valid PNG file.')

    int_map = {1: '>B', 2: '>H', 4: '>I'}
    unpack_integer = lambda x: compat_struct_unpack(int_map[len(x)], x)[0]  # big-endian uint of 1/2/4 bytes

    chunks = []

    # Walk the chunk stream: 4-byte length, 4-byte type, payload, 4-byte CRC.
    while header:
        length = unpack_integer(header[:4])
        header = header[4:]

        chunk_type = header[:4]
        header = header[4:]

        chunk_data = header[:length]
        header = header[length:]

        header = header[4:]  # Skip CRC

        chunks.append({
            'type': chunk_type,
            'length': length,
            'data': chunk_data
        })

    # IHDR is always the first chunk; width and height are its first fields.
    ihdr = chunks[0]['data']

    width = unpack_integer(ihdr[:4])
    height = unpack_integer(ihdr[4:8])

    # Image data may be split across several IDAT chunks; concatenate before
    # inflating.
    idat = b''

    for chunk in chunks:
        if chunk['type'] == b'IDAT':
            idat += chunk['data']

    if not idat:
        raise IOError('Unable to read PNG data.')

    decompressed_data = bytearray(zlib.decompress(idat))

    # 3 bytes per pixel; every scanline is prefixed by one filter-type byte.
    stride = width * 3
    pixels = []

    def _get_pixel(idx):
        # Look up an already-reconstructed channel byte by flat index.
        x = idx % stride
        y = idx // stride
        return pixels[y][x]

    for y in range(height):
        basePos = y * (1 + stride)
        filter_type = decompressed_data[basePos]  # first byte of the scanline

        current_row = []

        pixels.append(current_row)

        for x in range(stride):
            color = decompressed_data[1 + basePos + x]
            basex = y * stride + x
            left = 0
            up = 0

            # Filter neighbours: 'left' is the same channel of the previous
            # pixel (3 bytes back), 'up' the same channel one scanline above.
            if x > 2:
                left = _get_pixel(basex - 3)
            if y > 0:
                up = _get_pixel(basex - stride)

            if filter_type == 1:  # Sub
                color = (color + left) & 0xff
            elif filter_type == 2:  # Up
                color = (color + up) & 0xff
            elif filter_type == 3:  # Average
                color = (color + ((left + up) >> 1)) & 0xff
            elif filter_type == 4:  # Paeth
                a = left
                b = up
                c = 0

                if x > 2 and y > 0:
                    c = _get_pixel(basex - stride - 3)

                p = a + b - c

                pa = abs(p - a)
                pb = abs(p - b)
                pc = abs(p - c)

                # Choose the neighbour closest to the Paeth predictor.
                if pa <= pb and pa <= pc:
                    color = (color + a) & 0xff
                elif pb <= pc:
                    color = (color + b) & 0xff
                else:
                    color = (color + c) & 0xff

            current_row.append(color)

    return width, height, pixels
5691
5692
def write_xattr(path, key, value):
    """Set extended attribute `key` (bytes value) on file `path`.

    Tries, in order: the pyxattr/xattr Python modules, NTFS Alternate Data
    Streams on Windows, then the setfattr/xattr command-line tools.
    Raises XAttrMetadataError on failure, XAttrUnavailableError when no
    usable implementation exists.
    """
    # This mess below finds the best xattr tool for the job
    try:
        # try the pyxattr module...
        import xattr

        if hasattr(xattr, 'set'):  # pyxattr
            # Unicode arguments are not supported in python-pyxattr until
            # version 0.5.0
            # See https://github.com/ytdl-org/youtube-dl/issues/5498
            pyxattr_required_version = '0.5.0'
            if version_tuple(xattr.__version__) < version_tuple(pyxattr_required_version):
                # TODO: fallback to CLI tools
                raise XAttrUnavailableError(
                    'python-pyxattr is detected but is too old. '
                    'youtube-dlc requires %s or above while your version is %s. '
                    'Falling back to other xattr implementations' % (
                        pyxattr_required_version, xattr.__version__))

            setxattr = xattr.set
        else:  # xattr
            setxattr = xattr.setxattr

        try:
            setxattr(path, key, value)
        except EnvironmentError as e:
            raise XAttrMetadataError(e.errno, e.strerror)

    except ImportError:
        if compat_os_name == 'nt':
            # Write xattrs to NTFS Alternate Data Streams:
            # http://en.wikipedia.org/wiki/NTFS#Alternate_data_streams_.28ADS.29
            assert ':' not in key
            assert os.path.exists(path)

            ads_fn = path + ':' + key
            try:
                with open(ads_fn, 'wb') as f:
                    f.write(value)
            except EnvironmentError as e:
                raise XAttrMetadataError(e.errno, e.strerror)
        else:
            # Fall back to the setfattr (GNU attr) or xattr CLI tools.
            user_has_setfattr = check_executable('setfattr', ['--version'])
            user_has_xattr = check_executable('xattr', ['-h'])

            if user_has_setfattr or user_has_xattr:

                value = value.decode('utf-8')
                if user_has_setfattr:
                    executable = 'setfattr'
                    opts = ['-n', key, '-v', value]
                elif user_has_xattr:
                    executable = 'xattr'
                    opts = ['-w', key, value]

                cmd = ([encodeFilename(executable, True)]
                       + [encodeArgument(o) for o in opts]
                       + [encodeFilename(path, True)])

                try:
                    p = subprocess.Popen(
                        cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE, stdin=subprocess.PIPE)
                except EnvironmentError as e:
                    raise XAttrMetadataError(e.errno, e.strerror)
                stdout, stderr = process_communicate_or_kill(p)
                stderr = stderr.decode('utf-8', 'replace')
                if p.returncode != 0:
                    raise XAttrMetadataError(p.returncode, stderr)

            else:
                # On Unix, and can't find pyxattr, setfattr, or xattr.
                if sys.platform.startswith('linux'):
                    raise XAttrUnavailableError(
                        "Couldn't find a tool to set the xattrs. "
                        "Install either the python 'pyxattr' or 'xattr' "
                        "modules, or the GNU 'attr' package "
                        "(which contains the 'setfattr' tool).")
                else:
                    raise XAttrUnavailableError(
                        "Couldn't find a tool to set the xattrs. "
                        "Install either the python 'xattr' module, "
                        "or the 'xattr' binary.")
5775
5776
def random_birthday(year_field, month_field, day_field):
    """Pick a uniformly random date between 1950-01-01 and 1995-12-31 and
    return it as a dict of string values under the given field names."""
    earliest = datetime.date(1950, 1, 1)
    latest = datetime.date(1995, 12, 31)
    span_days = (latest - earliest).days
    chosen = earliest + datetime.timedelta(random.randint(0, span_days))
    return {
        year_field: str(chosen.year),
        month_field: str(chosen.month),
        day_field: str(chosen.day),
    }
5787
5788
# Templates for internet shortcut files, which are plain text files.

# Windows-style .url shortcut (INI format).
DOT_URL_LINK_TEMPLATE = '''
[InternetShortcut]
URL=%(url)s
'''.lstrip()

# macOS .webloc shortcut (XML property list).
DOT_WEBLOC_LINK_TEMPLATE = '''
<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
<plist version="1.0">
<dict>
\t<key>URL</key>
\t<string>%(url)s</string>
</dict>
</plist>
'''.lstrip()

# freedesktop.org .desktop link entry (Linux desktop environments).
DOT_DESKTOP_LINK_TEMPLATE = '''
[Desktop Entry]
Encoding=UTF-8
Name=%(filename)s
Type=Link
URL=%(url)s
Icon=text-html
'''.lstrip()
5814
5815
def iri_to_uri(iri):
    """
    Converts an IRI (Internationalized Resource Identifier, allowing Unicode characters) to a URI (Uniform Resource Identifier, ASCII-only).

    The function doesn't add an additional layer of escaping; e.g., it doesn't escape `%3C` as `%253C`. Instead, it percent-escapes characters with an underlying UTF-8 encoding *besides* those already escaped, leaving the URI intact.
    """

    iri_parts = compat_urllib_parse_urlparse(iri)

    if '[' in iri_parts.netloc:
        raise ValueError('IPv6 URIs are not, yet, supported.')
        # Querying `.netloc`, when there's only one bracket, also raises a ValueError.

    # The `safe` argument values, that the following code uses, contain the characters that should not be percent-encoded. Everything else but letters, digits and '_.-' will be percent-encoded with an underlying UTF-8 encoding. Everything already percent-encoded will be left as is.

    net_location = ''
    if iri_parts.username:
        net_location += compat_urllib_parse_quote(iri_parts.username, safe=r"!$%&'()*+,~")
        if iri_parts.password is not None:
            net_location += ':' + compat_urllib_parse_quote(iri_parts.password, safe=r"!$%&'()*+,~")
        net_location += '@'

    net_location += iri_parts.hostname.encode('idna').decode('utf-8')  # Punycode for Unicode hostnames.
    # The 'idna' encoding produces ASCII text.
    # NOTE(review): port 80 is dropped regardless of scheme — for an
    # explicit https://host:80/ URL this changes the effective port; confirm
    # whether that is intended before relying on round-tripping such URLs.
    if iri_parts.port is not None and iri_parts.port != 80:
        net_location += ':' + str(iri_parts.port)

    return compat_urllib_parse_urlunparse(
        (iri_parts.scheme,
            net_location,

            compat_urllib_parse_quote_plus(iri_parts.path, safe=r"!$%&'()*+,/:;=@|~"),

            # Unsure about the `safe` argument, since this is a legacy way of handling parameters.
            compat_urllib_parse_quote_plus(iri_parts.params, safe=r"!$%&'()*+,/:;=@|~"),

            # Not totally sure about the `safe` argument, since the source does not explicitly mention the query URI component.
            compat_urllib_parse_quote_plus(iri_parts.query, safe=r"!$%&'()*+,/:;=?@{|}~"),

            compat_urllib_parse_quote_plus(iri_parts.fragment, safe=r"!#$%&'()*+,/:;=?@{|}~")))
5856
5857 # Source for `safe` arguments: https://url.spec.whatwg.org/#percent-encoded-bytes.
5858
5859
def to_high_limit_path(path):
    """Return `path` prefixed with '\\\\?\\' on Windows/Cygwin to bypass the
    MAX_PATH limitation; individual path segments may still be length-limited.
    On other platforms the path is returned unchanged."""
    if sys.platform not in ('win32', 'cygwin'):
        return path
    # '\\?\' tells the Win32 API to skip path normalization and length checks.
    return '\\\\?\\' + os.path.abspath(path)
5866
5867
def format_field(obj, field, template='%s', ignore=(None, ''), default='', func=None):
    """Look up `field` in mapping `obj` and interpolate it into `template`.

    Values found in `ignore` (before or after applying `func`) yield
    `default` instead; `func`, when given, transforms the value first.
    """
    value = obj.get(field, default)
    if value in ignore:
        return default
    if func:
        value = func(value)
    # `func` may have mapped the value into the ignore set.
    return default if value in ignore else template % value
5873
5874
def clean_podcast_url(url):
    """Strip well-known podcast analytics/tracking redirect prefixes
    (Chartable, Blubrry, Podtrac, Acast, Podcorn, Podsights) from `url`."""
    tracker_prefix = r'''(?x)
        (?:
            (?:
                chtbl\.com/track|
                media\.blubrry\.com| # https://create.blubrry.com/resources/podcast-media-download-statistics/getting-started/
                play\.podtrac\.com
            )/[^/]+|
            (?:dts|www)\.podtrac\.com/(?:pts/)?redirect\.[0-9a-z]{3,4}| # http://analytics.podtrac.com/how-to-measure
            flex\.acast\.com|
            pd(?:
                cn\.co| # https://podcorn.com/analytics-prefix/
                st\.fm # https://podsights.com/docs/
            )/e
        )/'''
    return re.sub(tracker_prefix, '', url)
5890
5891
5892 _HEX_TABLE = '0123456789abcdef'
5893
5894
5895 def random_uuidv4():
5896 return re.sub(r'[xy]', lambda x: _HEX_TABLE[random.randint(0, 15)], 'xxxxxxxx-xxxx-4xxx-yxxx-xxxxxxxxxxxx')
5897
5898
def make_dir(path, to_screen=None):
    """Ensure the parent directory of `path` exists.

    Returns True on success (including when there is nothing to create),
    False if directory creation failed. On failure, `to_screen`, when
    callable, is invoked with a human-readable error message.
    """
    try:
        dn = os.path.dirname(path)
        if dn and not os.path.exists(dn):
            os.makedirs(dn)
        return True
    except (OSError, IOError) as err:
        # Bug fix: was `if callable(to_screen) is not None:` — callable()
        # returns a bool, so the test was always True and calling
        # to_screen(None) crashed with TypeError when no callback was given.
        if callable(to_screen):
            to_screen('unable to create directory ' + error_to_compat_str(err))
        return False
5909
5910
def get_executable_path():
    """Return the absolute directory of the running program.

    When argv[0] differs from sys.executable (i.e. not running as a
    packaged/frozen binary), step one level up from the script directory.
    """
    base = os.path.dirname(sys.argv[0])
    is_packaged = os.path.abspath(sys.argv[0]) == os.path.abspath(sys.executable)
    if not is_packaged:
        base = os.path.join(base, '..')
    return os.path.abspath(base)
5916
5917
def load_plugins(name, type, namespace):
    """Load plugin classes from `<executable_path>/ytdlp_plugins/<name>`.

    Every attribute of the plugin module whose name ends with `type` is
    collected into the returned list and injected into `namespace`.
    A missing plugin module is silently ignored.
    (`type` shadows the builtin, but renaming it would change the
    keyword-argument interface.)
    """
    classes = []
    plugin_info = [None]  # sentinel so `finally` is safe if find_module raises
    try:
        plugin_info = imp.find_module(
            name, [os.path.join(get_executable_path(), 'ytdlp_plugins')])
        module = imp.load_module(name, *plugin_info)
        for attr in dir(module):
            if not attr.endswith(type):
                continue
            klass = getattr(module, attr)
            classes.append(klass)
            namespace[attr] = klass
    except ImportError:
        pass
    finally:
        # imp.find_module returns an open file object as its first element;
        # close it whether or not loading succeeded.
        if plugin_info[0] is not None:
            plugin_info[0].close()
    return classes