#!/usr/bin/env python3
# coding: utf-8

from __future__ import unicode_literals

import base64
import binascii
import calendar
import codecs
import collections
import contextlib
import ctypes
import datetime
import email.utils
import email.header
import errno
import functools
import gzip
import imp
import io
import itertools
import json
import locale
import math
import operator
import os
import platform
import random
import re
import socket
import ssl
import subprocess
import sys
import tempfile
import time
import traceback
import xml.etree.ElementTree
import zlib

from .compat import (
    compat_HTMLParseError,
    compat_HTMLParser,
    compat_HTTPError,
    compat_basestring,
    compat_chr,
    compat_cookiejar,
    compat_ctypes_WINFUNCTYPE,
    compat_etree_fromstring,
    compat_expanduser,
    compat_html_entities,
    compat_html_entities_html5,
    compat_http_client,
    compat_integer_types,
    compat_numeric_types,
    compat_kwargs,
    compat_os_name,
    compat_parse_qs,
    compat_shlex_quote,
    compat_str,
    compat_struct_pack,
    compat_struct_unpack,
    compat_urllib_error,
    compat_urllib_parse,
    compat_urllib_parse_urlencode,
    compat_urllib_parse_urlparse,
    compat_urllib_parse_urlunparse,
    compat_urllib_parse_quote,
    compat_urllib_parse_quote_plus,
    compat_urllib_parse_unquote_plus,
    compat_urllib_request,
    compat_urlparse,
    compat_xpath,
)

from .socks import (
    ProxyType,
    sockssocket,
)


def register_socks_protocols():
    # "Register" SOCKS protocols
    # In Python < 2.6.5, urlsplit() suffers from bug https://bugs.python.org/issue7904
    # URLs with protocols not in urlparse.uses_netloc are not handled correctly
    for scheme in ('socks', 'socks4', 'socks4a', 'socks5'):
        if scheme not in compat_urlparse.uses_netloc:
            compat_urlparse.uses_netloc.append(scheme)


# This is not clearly defined otherwise
compiled_regex_type = type(re.compile(''))


def random_user_agent():
    _USER_AGENT_TPL = 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/%s Safari/537.36'
    _CHROME_VERSIONS = (
97 '74.0.3729.129',
98 '76.0.3780.3',
99 '76.0.3780.2',
100 '74.0.3729.128',
101 '76.0.3780.1',
102 '76.0.3780.0',
103 '75.0.3770.15',
104 '74.0.3729.127',
105 '74.0.3729.126',
106 '76.0.3779.1',
107 '76.0.3779.0',
108 '75.0.3770.14',
109 '74.0.3729.125',
110 '76.0.3778.1',
111 '76.0.3778.0',
112 '75.0.3770.13',
113 '74.0.3729.124',
114 '74.0.3729.123',
115 '73.0.3683.121',
116 '76.0.3777.1',
117 '76.0.3777.0',
118 '75.0.3770.12',
119 '74.0.3729.122',
120 '76.0.3776.4',
121 '75.0.3770.11',
122 '74.0.3729.121',
123 '76.0.3776.3',
124 '76.0.3776.2',
125 '73.0.3683.120',
126 '74.0.3729.120',
127 '74.0.3729.119',
128 '74.0.3729.118',
129 '76.0.3776.1',
130 '76.0.3776.0',
131 '76.0.3775.5',
132 '75.0.3770.10',
133 '74.0.3729.117',
134 '76.0.3775.4',
135 '76.0.3775.3',
136 '74.0.3729.116',
137 '75.0.3770.9',
138 '76.0.3775.2',
139 '76.0.3775.1',
140 '76.0.3775.0',
141 '75.0.3770.8',
142 '74.0.3729.115',
143 '74.0.3729.114',
144 '76.0.3774.1',
145 '76.0.3774.0',
146 '75.0.3770.7',
147 '74.0.3729.113',
148 '74.0.3729.112',
149 '74.0.3729.111',
150 '76.0.3773.1',
151 '76.0.3773.0',
152 '75.0.3770.6',
153 '74.0.3729.110',
154 '74.0.3729.109',
155 '76.0.3772.1',
156 '76.0.3772.0',
157 '75.0.3770.5',
158 '74.0.3729.108',
159 '74.0.3729.107',
160 '76.0.3771.1',
161 '76.0.3771.0',
162 '75.0.3770.4',
163 '74.0.3729.106',
164 '74.0.3729.105',
165 '75.0.3770.3',
166 '74.0.3729.104',
167 '74.0.3729.103',
168 '74.0.3729.102',
169 '75.0.3770.2',
170 '74.0.3729.101',
171 '75.0.3770.1',
172 '75.0.3770.0',
173 '74.0.3729.100',
174 '75.0.3769.5',
175 '75.0.3769.4',
176 '74.0.3729.99',
177 '75.0.3769.3',
178 '75.0.3769.2',
179 '75.0.3768.6',
180 '74.0.3729.98',
181 '75.0.3769.1',
182 '75.0.3769.0',
183 '74.0.3729.97',
184 '73.0.3683.119',
185 '73.0.3683.118',
186 '74.0.3729.96',
187 '75.0.3768.5',
188 '75.0.3768.4',
189 '75.0.3768.3',
190 '75.0.3768.2',
191 '74.0.3729.95',
192 '74.0.3729.94',
193 '75.0.3768.1',
194 '75.0.3768.0',
195 '74.0.3729.93',
196 '74.0.3729.92',
197 '73.0.3683.117',
198 '74.0.3729.91',
199 '75.0.3766.3',
200 '74.0.3729.90',
201 '75.0.3767.2',
202 '75.0.3767.1',
203 '75.0.3767.0',
204 '74.0.3729.89',
205 '73.0.3683.116',
206 '75.0.3766.2',
207 '74.0.3729.88',
208 '75.0.3766.1',
209 '75.0.3766.0',
210 '74.0.3729.87',
211 '73.0.3683.115',
212 '74.0.3729.86',
213 '75.0.3765.1',
214 '75.0.3765.0',
215 '74.0.3729.85',
216 '73.0.3683.114',
217 '74.0.3729.84',
218 '75.0.3764.1',
219 '75.0.3764.0',
220 '74.0.3729.83',
221 '73.0.3683.113',
222 '75.0.3763.2',
223 '75.0.3761.4',
224 '74.0.3729.82',
225 '75.0.3763.1',
226 '75.0.3763.0',
227 '74.0.3729.81',
228 '73.0.3683.112',
229 '75.0.3762.1',
230 '75.0.3762.0',
231 '74.0.3729.80',
232 '75.0.3761.3',
233 '74.0.3729.79',
234 '73.0.3683.111',
235 '75.0.3761.2',
236 '74.0.3729.78',
237 '74.0.3729.77',
238 '75.0.3761.1',
239 '75.0.3761.0',
240 '73.0.3683.110',
241 '74.0.3729.76',
242 '74.0.3729.75',
243 '75.0.3760.0',
244 '74.0.3729.74',
245 '75.0.3759.8',
246 '75.0.3759.7',
247 '75.0.3759.6',
248 '74.0.3729.73',
249 '75.0.3759.5',
250 '74.0.3729.72',
251 '73.0.3683.109',
252 '75.0.3759.4',
253 '75.0.3759.3',
254 '74.0.3729.71',
255 '75.0.3759.2',
256 '74.0.3729.70',
257 '73.0.3683.108',
258 '74.0.3729.69',
259 '75.0.3759.1',
260 '75.0.3759.0',
261 '74.0.3729.68',
262 '73.0.3683.107',
263 '74.0.3729.67',
264 '75.0.3758.1',
265 '75.0.3758.0',
266 '74.0.3729.66',
267 '73.0.3683.106',
268 '74.0.3729.65',
269 '75.0.3757.1',
270 '75.0.3757.0',
271 '74.0.3729.64',
272 '73.0.3683.105',
273 '74.0.3729.63',
274 '75.0.3756.1',
275 '75.0.3756.0',
276 '74.0.3729.62',
277 '73.0.3683.104',
278 '75.0.3755.3',
279 '75.0.3755.2',
280 '73.0.3683.103',
281 '75.0.3755.1',
282 '75.0.3755.0',
283 '74.0.3729.61',
284 '73.0.3683.102',
285 '74.0.3729.60',
286 '75.0.3754.2',
287 '74.0.3729.59',
288 '75.0.3753.4',
289 '74.0.3729.58',
290 '75.0.3754.1',
291 '75.0.3754.0',
292 '74.0.3729.57',
293 '73.0.3683.101',
294 '75.0.3753.3',
295 '75.0.3752.2',
296 '75.0.3753.2',
297 '74.0.3729.56',
298 '75.0.3753.1',
299 '75.0.3753.0',
300 '74.0.3729.55',
301 '73.0.3683.100',
302 '74.0.3729.54',
303 '75.0.3752.1',
304 '75.0.3752.0',
305 '74.0.3729.53',
306 '73.0.3683.99',
307 '74.0.3729.52',
308 '75.0.3751.1',
309 '75.0.3751.0',
310 '74.0.3729.51',
311 '73.0.3683.98',
312 '74.0.3729.50',
313 '75.0.3750.0',
314 '74.0.3729.49',
315 '74.0.3729.48',
316 '74.0.3729.47',
317 '75.0.3749.3',
318 '74.0.3729.46',
319 '73.0.3683.97',
320 '75.0.3749.2',
321 '74.0.3729.45',
322 '75.0.3749.1',
323 '75.0.3749.0',
324 '74.0.3729.44',
325 '73.0.3683.96',
326 '74.0.3729.43',
327 '74.0.3729.42',
328 '75.0.3748.1',
329 '75.0.3748.0',
330 '74.0.3729.41',
331 '75.0.3747.1',
332 '73.0.3683.95',
333 '75.0.3746.4',
334 '74.0.3729.40',
335 '74.0.3729.39',
336 '75.0.3747.0',
337 '75.0.3746.3',
338 '75.0.3746.2',
339 '74.0.3729.38',
340 '75.0.3746.1',
341 '75.0.3746.0',
342 '74.0.3729.37',
343 '73.0.3683.94',
344 '75.0.3745.5',
345 '75.0.3745.4',
346 '75.0.3745.3',
347 '75.0.3745.2',
348 '74.0.3729.36',
349 '75.0.3745.1',
350 '75.0.3745.0',
351 '75.0.3744.2',
352 '74.0.3729.35',
353 '73.0.3683.93',
354 '74.0.3729.34',
355 '75.0.3744.1',
356 '75.0.3744.0',
357 '74.0.3729.33',
358 '73.0.3683.92',
359 '74.0.3729.32',
360 '74.0.3729.31',
361 '73.0.3683.91',
362 '75.0.3741.2',
363 '75.0.3740.5',
364 '74.0.3729.30',
365 '75.0.3741.1',
366 '75.0.3741.0',
367 '74.0.3729.29',
368 '75.0.3740.4',
369 '73.0.3683.90',
370 '74.0.3729.28',
371 '75.0.3740.3',
372 '73.0.3683.89',
373 '75.0.3740.2',
374 '74.0.3729.27',
375 '75.0.3740.1',
376 '75.0.3740.0',
377 '74.0.3729.26',
378 '73.0.3683.88',
379 '73.0.3683.87',
380 '74.0.3729.25',
381 '75.0.3739.1',
382 '75.0.3739.0',
383 '73.0.3683.86',
384 '74.0.3729.24',
385 '73.0.3683.85',
386 '75.0.3738.4',
387 '75.0.3738.3',
388 '75.0.3738.2',
389 '75.0.3738.1',
390 '75.0.3738.0',
391 '74.0.3729.23',
392 '73.0.3683.84',
393 '74.0.3729.22',
394 '74.0.3729.21',
395 '75.0.3737.1',
396 '75.0.3737.0',
397 '74.0.3729.20',
398 '73.0.3683.83',
399 '74.0.3729.19',
400 '75.0.3736.1',
401 '75.0.3736.0',
402 '74.0.3729.18',
403 '73.0.3683.82',
404 '74.0.3729.17',
405 '75.0.3735.1',
406 '75.0.3735.0',
407 '74.0.3729.16',
408 '73.0.3683.81',
409 '75.0.3734.1',
410 '75.0.3734.0',
411 '74.0.3729.15',
412 '73.0.3683.80',
413 '74.0.3729.14',
414 '75.0.3733.1',
415 '75.0.3733.0',
416 '75.0.3732.1',
417 '74.0.3729.13',
418 '74.0.3729.12',
419 '73.0.3683.79',
420 '74.0.3729.11',
421 '75.0.3732.0',
422 '74.0.3729.10',
423 '73.0.3683.78',
424 '74.0.3729.9',
425 '74.0.3729.8',
426 '74.0.3729.7',
427 '75.0.3731.3',
428 '75.0.3731.2',
429 '75.0.3731.0',
430 '74.0.3729.6',
431 '73.0.3683.77',
432 '73.0.3683.76',
433 '75.0.3730.5',
434 '75.0.3730.4',
435 '73.0.3683.75',
436 '74.0.3729.5',
437 '73.0.3683.74',
438 '75.0.3730.3',
439 '75.0.3730.2',
440 '74.0.3729.4',
441 '73.0.3683.73',
442 '73.0.3683.72',
443 '75.0.3730.1',
444 '75.0.3730.0',
445 '74.0.3729.3',
446 '73.0.3683.71',
447 '74.0.3729.2',
448 '73.0.3683.70',
449 '74.0.3729.1',
450 '74.0.3729.0',
451 '74.0.3726.4',
452 '73.0.3683.69',
453 '74.0.3726.3',
454 '74.0.3728.0',
455 '74.0.3726.2',
456 '73.0.3683.68',
457 '74.0.3726.1',
458 '74.0.3726.0',
459 '74.0.3725.4',
460 '73.0.3683.67',
461 '73.0.3683.66',
462 '74.0.3725.3',
463 '74.0.3725.2',
464 '74.0.3725.1',
465 '74.0.3724.8',
466 '74.0.3725.0',
467 '73.0.3683.65',
468 '74.0.3724.7',
469 '74.0.3724.6',
470 '74.0.3724.5',
471 '74.0.3724.4',
472 '74.0.3724.3',
473 '74.0.3724.2',
474 '74.0.3724.1',
475 '74.0.3724.0',
476 '73.0.3683.64',
477 '74.0.3723.1',
478 '74.0.3723.0',
479 '73.0.3683.63',
480 '74.0.3722.1',
481 '74.0.3722.0',
482 '73.0.3683.62',
483 '74.0.3718.9',
484 '74.0.3702.3',
485 '74.0.3721.3',
486 '74.0.3721.2',
487 '74.0.3721.1',
488 '74.0.3721.0',
489 '74.0.3720.6',
490 '73.0.3683.61',
491 '72.0.3626.122',
492 '73.0.3683.60',
493 '74.0.3720.5',
494 '72.0.3626.121',
495 '74.0.3718.8',
496 '74.0.3720.4',
497 '74.0.3720.3',
498 '74.0.3718.7',
499 '74.0.3720.2',
500 '74.0.3720.1',
501 '74.0.3720.0',
502 '74.0.3718.6',
503 '74.0.3719.5',
504 '73.0.3683.59',
505 '74.0.3718.5',
506 '74.0.3718.4',
507 '74.0.3719.4',
508 '74.0.3719.3',
509 '74.0.3719.2',
510 '74.0.3719.1',
511 '73.0.3683.58',
512 '74.0.3719.0',
513 '73.0.3683.57',
514 '73.0.3683.56',
515 '74.0.3718.3',
516 '73.0.3683.55',
517 '74.0.3718.2',
518 '74.0.3718.1',
519 '74.0.3718.0',
520 '73.0.3683.54',
521 '74.0.3717.2',
522 '73.0.3683.53',
523 '74.0.3717.1',
524 '74.0.3717.0',
525 '73.0.3683.52',
526 '74.0.3716.1',
527 '74.0.3716.0',
528 '73.0.3683.51',
529 '74.0.3715.1',
530 '74.0.3715.0',
531 '73.0.3683.50',
532 '74.0.3711.2',
533 '74.0.3714.2',
534 '74.0.3713.3',
535 '74.0.3714.1',
536 '74.0.3714.0',
537 '73.0.3683.49',
538 '74.0.3713.1',
539 '74.0.3713.0',
540 '72.0.3626.120',
541 '73.0.3683.48',
542 '74.0.3712.2',
543 '74.0.3712.1',
544 '74.0.3712.0',
545 '73.0.3683.47',
546 '72.0.3626.119',
547 '73.0.3683.46',
548 '74.0.3710.2',
549 '72.0.3626.118',
550 '74.0.3711.1',
551 '74.0.3711.0',
552 '73.0.3683.45',
553 '72.0.3626.117',
554 '74.0.3710.1',
555 '74.0.3710.0',
556 '73.0.3683.44',
557 '72.0.3626.116',
558 '74.0.3709.1',
559 '74.0.3709.0',
560 '74.0.3704.9',
561 '73.0.3683.43',
562 '72.0.3626.115',
563 '74.0.3704.8',
564 '74.0.3704.7',
565 '74.0.3708.0',
566 '74.0.3706.7',
567 '74.0.3704.6',
568 '73.0.3683.42',
569 '72.0.3626.114',
570 '74.0.3706.6',
571 '72.0.3626.113',
572 '74.0.3704.5',
573 '74.0.3706.5',
574 '74.0.3706.4',
575 '74.0.3706.3',
576 '74.0.3706.2',
577 '74.0.3706.1',
578 '74.0.3706.0',
579 '73.0.3683.41',
580 '72.0.3626.112',
581 '74.0.3705.1',
582 '74.0.3705.0',
583 '73.0.3683.40',
584 '72.0.3626.111',
585 '73.0.3683.39',
586 '74.0.3704.4',
587 '73.0.3683.38',
588 '74.0.3704.3',
589 '74.0.3704.2',
590 '74.0.3704.1',
591 '74.0.3704.0',
592 '73.0.3683.37',
593 '72.0.3626.110',
594 '72.0.3626.109',
595 '74.0.3703.3',
596 '74.0.3703.2',
597 '73.0.3683.36',
598 '74.0.3703.1',
599 '74.0.3703.0',
600 '73.0.3683.35',
601 '72.0.3626.108',
602 '74.0.3702.2',
603 '74.0.3699.3',
604 '74.0.3702.1',
605 '74.0.3702.0',
606 '73.0.3683.34',
607 '72.0.3626.107',
608 '73.0.3683.33',
609 '74.0.3701.1',
610 '74.0.3701.0',
611 '73.0.3683.32',
612 '73.0.3683.31',
613 '72.0.3626.105',
614 '74.0.3700.1',
615 '74.0.3700.0',
616 '73.0.3683.29',
617 '72.0.3626.103',
618 '74.0.3699.2',
619 '74.0.3699.1',
620 '74.0.3699.0',
621 '73.0.3683.28',
622 '72.0.3626.102',
623 '73.0.3683.27',
624 '73.0.3683.26',
625 '74.0.3698.0',
626 '74.0.3696.2',
627 '72.0.3626.101',
628 '73.0.3683.25',
629 '74.0.3696.1',
630 '74.0.3696.0',
631 '74.0.3694.8',
632 '72.0.3626.100',
633 '74.0.3694.7',
634 '74.0.3694.6',
635 '74.0.3694.5',
636 '74.0.3694.4',
637 '72.0.3626.99',
638 '72.0.3626.98',
639 '74.0.3694.3',
640 '73.0.3683.24',
641 '72.0.3626.97',
642 '72.0.3626.96',
643 '72.0.3626.95',
644 '73.0.3683.23',
645 '72.0.3626.94',
646 '73.0.3683.22',
647 '73.0.3683.21',
648 '72.0.3626.93',
649 '74.0.3694.2',
650 '72.0.3626.92',
651 '74.0.3694.1',
652 '74.0.3694.0',
653 '74.0.3693.6',
654 '73.0.3683.20',
655 '72.0.3626.91',
656 '74.0.3693.5',
657 '74.0.3693.4',
658 '74.0.3693.3',
659 '74.0.3693.2',
660 '73.0.3683.19',
661 '74.0.3693.1',
662 '74.0.3693.0',
663 '73.0.3683.18',
664 '72.0.3626.90',
665 '74.0.3692.1',
666 '74.0.3692.0',
667 '73.0.3683.17',
668 '72.0.3626.89',
669 '74.0.3687.3',
670 '74.0.3691.1',
671 '74.0.3691.0',
672 '73.0.3683.16',
673 '72.0.3626.88',
674 '72.0.3626.87',
675 '73.0.3683.15',
676 '74.0.3690.1',
677 '74.0.3690.0',
678 '73.0.3683.14',
679 '72.0.3626.86',
680 '73.0.3683.13',
681 '73.0.3683.12',
682 '74.0.3689.1',
683 '74.0.3689.0',
684 '73.0.3683.11',
685 '72.0.3626.85',
686 '73.0.3683.10',
687 '72.0.3626.84',
688 '73.0.3683.9',
689 '74.0.3688.1',
690 '74.0.3688.0',
691 '73.0.3683.8',
692 '72.0.3626.83',
693 '74.0.3687.2',
694 '74.0.3687.1',
695 '74.0.3687.0',
696 '73.0.3683.7',
697 '72.0.3626.82',
698 '74.0.3686.4',
699 '72.0.3626.81',
700 '74.0.3686.3',
701 '74.0.3686.2',
702 '74.0.3686.1',
703 '74.0.3686.0',
704 '73.0.3683.6',
705 '72.0.3626.80',
706 '74.0.3685.1',
707 '74.0.3685.0',
708 '73.0.3683.5',
709 '72.0.3626.79',
710 '74.0.3684.1',
711 '74.0.3684.0',
712 '73.0.3683.4',
713 '72.0.3626.78',
714 '72.0.3626.77',
715 '73.0.3683.3',
716 '73.0.3683.2',
717 '72.0.3626.76',
718 '73.0.3683.1',
719 '73.0.3683.0',
720 '72.0.3626.75',
721 '71.0.3578.141',
722 '73.0.3682.1',
723 '73.0.3682.0',
724 '72.0.3626.74',
725 '71.0.3578.140',
726 '73.0.3681.4',
727 '73.0.3681.3',
728 '73.0.3681.2',
729 '73.0.3681.1',
730 '73.0.3681.0',
731 '72.0.3626.73',
732 '71.0.3578.139',
733 '72.0.3626.72',
734 '72.0.3626.71',
735 '73.0.3680.1',
736 '73.0.3680.0',
737 '72.0.3626.70',
738 '71.0.3578.138',
739 '73.0.3678.2',
740 '73.0.3679.1',
741 '73.0.3679.0',
742 '72.0.3626.69',
743 '71.0.3578.137',
744 '73.0.3678.1',
745 '73.0.3678.0',
746 '71.0.3578.136',
747 '73.0.3677.1',
748 '73.0.3677.0',
749 '72.0.3626.68',
750 '72.0.3626.67',
751 '71.0.3578.135',
752 '73.0.3676.1',
753 '73.0.3676.0',
754 '73.0.3674.2',
755 '72.0.3626.66',
756 '71.0.3578.134',
757 '73.0.3674.1',
758 '73.0.3674.0',
759 '72.0.3626.65',
760 '71.0.3578.133',
761 '73.0.3673.2',
762 '73.0.3673.1',
763 '73.0.3673.0',
764 '72.0.3626.64',
765 '71.0.3578.132',
766 '72.0.3626.63',
767 '72.0.3626.62',
768 '72.0.3626.61',
769 '72.0.3626.60',
770 '73.0.3672.1',
771 '73.0.3672.0',
772 '72.0.3626.59',
773 '71.0.3578.131',
774 '73.0.3671.3',
775 '73.0.3671.2',
776 '73.0.3671.1',
777 '73.0.3671.0',
778 '72.0.3626.58',
779 '71.0.3578.130',
780 '73.0.3670.1',
781 '73.0.3670.0',
782 '72.0.3626.57',
783 '71.0.3578.129',
784 '73.0.3669.1',
785 '73.0.3669.0',
786 '72.0.3626.56',
787 '71.0.3578.128',
788 '73.0.3668.2',
789 '73.0.3668.1',
790 '73.0.3668.0',
791 '72.0.3626.55',
792 '71.0.3578.127',
793 '73.0.3667.2',
794 '73.0.3667.1',
795 '73.0.3667.0',
796 '72.0.3626.54',
797 '71.0.3578.126',
798 '73.0.3666.1',
799 '73.0.3666.0',
800 '72.0.3626.53',
801 '71.0.3578.125',
802 '73.0.3665.4',
803 '73.0.3665.3',
804 '72.0.3626.52',
805 '73.0.3665.2',
806 '73.0.3664.4',
807 '73.0.3665.1',
808 '73.0.3665.0',
809 '72.0.3626.51',
810 '71.0.3578.124',
811 '72.0.3626.50',
812 '73.0.3664.3',
813 '73.0.3664.2',
814 '73.0.3664.1',
815 '73.0.3664.0',
816 '73.0.3663.2',
817 '72.0.3626.49',
818 '71.0.3578.123',
819 '73.0.3663.1',
820 '73.0.3663.0',
821 '72.0.3626.48',
822 '71.0.3578.122',
823 '73.0.3662.1',
824 '73.0.3662.0',
825 '72.0.3626.47',
826 '71.0.3578.121',
827 '73.0.3661.1',
828 '72.0.3626.46',
829 '73.0.3661.0',
830 '72.0.3626.45',
831 '71.0.3578.120',
832 '73.0.3660.2',
833 '73.0.3660.1',
834 '73.0.3660.0',
835 '72.0.3626.44',
836 '71.0.3578.119',
837 '73.0.3659.1',
838 '73.0.3659.0',
839 '72.0.3626.43',
840 '71.0.3578.118',
841 '73.0.3658.1',
842 '73.0.3658.0',
843 '72.0.3626.42',
844 '71.0.3578.117',
845 '73.0.3657.1',
846 '73.0.3657.0',
847 '72.0.3626.41',
848 '71.0.3578.116',
849 '73.0.3656.1',
850 '73.0.3656.0',
851 '72.0.3626.40',
852 '71.0.3578.115',
853 '73.0.3655.1',
854 '73.0.3655.0',
855 '72.0.3626.39',
856 '71.0.3578.114',
857 '73.0.3654.1',
858 '73.0.3654.0',
859 '72.0.3626.38',
860 '71.0.3578.113',
861 '73.0.3653.1',
862 '73.0.3653.0',
863 '72.0.3626.37',
864 '71.0.3578.112',
865 '73.0.3652.1',
866 '73.0.3652.0',
867 '72.0.3626.36',
868 '71.0.3578.111',
869 '73.0.3651.1',
870 '73.0.3651.0',
871 '72.0.3626.35',
872 '71.0.3578.110',
873 '73.0.3650.1',
874 '73.0.3650.0',
875 '72.0.3626.34',
876 '71.0.3578.109',
877 '73.0.3649.1',
878 '73.0.3649.0',
879 '72.0.3626.33',
880 '71.0.3578.108',
881 '73.0.3648.2',
882 '73.0.3648.1',
883 '73.0.3648.0',
884 '72.0.3626.32',
885 '71.0.3578.107',
886 '73.0.3647.2',
887 '73.0.3647.1',
888 '73.0.3647.0',
889 '72.0.3626.31',
890 '71.0.3578.106',
891 '73.0.3635.3',
892 '73.0.3646.2',
893 '73.0.3646.1',
894 '73.0.3646.0',
895 '72.0.3626.30',
896 '71.0.3578.105',
897 '72.0.3626.29',
898 '73.0.3645.2',
899 '73.0.3645.1',
900 '73.0.3645.0',
901 '72.0.3626.28',
902 '71.0.3578.104',
903 '72.0.3626.27',
904 '72.0.3626.26',
905 '72.0.3626.25',
906 '72.0.3626.24',
907 '73.0.3644.0',
908 '73.0.3643.2',
909 '72.0.3626.23',
910 '71.0.3578.103',
911 '73.0.3643.1',
912 '73.0.3643.0',
913 '72.0.3626.22',
914 '71.0.3578.102',
915 '73.0.3642.1',
916 '73.0.3642.0',
917 '72.0.3626.21',
918 '71.0.3578.101',
919 '73.0.3641.1',
920 '73.0.3641.0',
921 '72.0.3626.20',
922 '71.0.3578.100',
923 '72.0.3626.19',
924 '73.0.3640.1',
925 '73.0.3640.0',
926 '72.0.3626.18',
927 '73.0.3639.1',
928 '71.0.3578.99',
929 '73.0.3639.0',
930 '72.0.3626.17',
931 '73.0.3638.2',
932 '72.0.3626.16',
933 '73.0.3638.1',
934 '73.0.3638.0',
935 '72.0.3626.15',
936 '71.0.3578.98',
937 '73.0.3635.2',
938 '71.0.3578.97',
939 '73.0.3637.1',
940 '73.0.3637.0',
941 '72.0.3626.14',
942 '71.0.3578.96',
943 '71.0.3578.95',
944 '72.0.3626.13',
945 '71.0.3578.94',
946 '73.0.3636.2',
947 '71.0.3578.93',
948 '73.0.3636.1',
949 '73.0.3636.0',
950 '72.0.3626.12',
951 '71.0.3578.92',
952 '73.0.3635.1',
953 '73.0.3635.0',
954 '72.0.3626.11',
955 '71.0.3578.91',
956 '73.0.3634.2',
957 '73.0.3634.1',
958 '73.0.3634.0',
959 '72.0.3626.10',
960 '71.0.3578.90',
961 '71.0.3578.89',
962 '73.0.3633.2',
963 '73.0.3633.1',
964 '73.0.3633.0',
965 '72.0.3610.4',
966 '72.0.3626.9',
967 '71.0.3578.88',
968 '73.0.3632.5',
969 '73.0.3632.4',
970 '73.0.3632.3',
971 '73.0.3632.2',
972 '73.0.3632.1',
973 '73.0.3632.0',
974 '72.0.3626.8',
975 '71.0.3578.87',
976 '73.0.3631.2',
977 '73.0.3631.1',
978 '73.0.3631.0',
979 '72.0.3626.7',
980 '71.0.3578.86',
981 '72.0.3626.6',
982 '73.0.3630.1',
983 '73.0.3630.0',
984 '72.0.3626.5',
985 '71.0.3578.85',
986 '72.0.3626.4',
987 '73.0.3628.3',
988 '73.0.3628.2',
989 '73.0.3629.1',
990 '73.0.3629.0',
991 '72.0.3626.3',
992 '71.0.3578.84',
993 '73.0.3628.1',
994 '73.0.3628.0',
995 '71.0.3578.83',
996 '73.0.3627.1',
997 '73.0.3627.0',
998 '72.0.3626.2',
999 '71.0.3578.82',
1000 '71.0.3578.81',
1001 '71.0.3578.80',
1002 '72.0.3626.1',
1003 '72.0.3626.0',
1004 '71.0.3578.79',
1005 '70.0.3538.124',
1006 '71.0.3578.78',
1007 '72.0.3623.4',
1008 '72.0.3625.2',
1009 '72.0.3625.1',
1010 '72.0.3625.0',
1011 '71.0.3578.77',
1012 '70.0.3538.123',
1013 '72.0.3624.4',
1014 '72.0.3624.3',
1015 '72.0.3624.2',
1016 '71.0.3578.76',
1017 '72.0.3624.1',
1018 '72.0.3624.0',
1019 '72.0.3623.3',
1020 '71.0.3578.75',
1021 '70.0.3538.122',
1022 '71.0.3578.74',
1023 '72.0.3623.2',
1024 '72.0.3610.3',
1025 '72.0.3623.1',
1026 '72.0.3623.0',
1027 '72.0.3622.3',
1028 '72.0.3622.2',
1029 '71.0.3578.73',
1030 '70.0.3538.121',
1031 '72.0.3622.1',
1032 '72.0.3622.0',
1033 '71.0.3578.72',
1034 '70.0.3538.120',
1035 '72.0.3621.1',
1036 '72.0.3621.0',
1037 '71.0.3578.71',
1038 '70.0.3538.119',
1039 '72.0.3620.1',
1040 '72.0.3620.0',
1041 '71.0.3578.70',
1042 '70.0.3538.118',
1043 '71.0.3578.69',
1044 '72.0.3619.1',
1045 '72.0.3619.0',
1046 '71.0.3578.68',
1047 '70.0.3538.117',
1048 '71.0.3578.67',
1049 '72.0.3618.1',
1050 '72.0.3618.0',
1051 '71.0.3578.66',
1052 '70.0.3538.116',
1053 '72.0.3617.1',
1054 '72.0.3617.0',
1055 '71.0.3578.65',
1056 '70.0.3538.115',
1057 '72.0.3602.3',
1058 '71.0.3578.64',
1059 '72.0.3616.1',
1060 '72.0.3616.0',
1061 '71.0.3578.63',
1062 '70.0.3538.114',
1063 '71.0.3578.62',
1064 '72.0.3615.1',
1065 '72.0.3615.0',
1066 '71.0.3578.61',
1067 '70.0.3538.113',
1068 '72.0.3614.1',
1069 '72.0.3614.0',
1070 '71.0.3578.60',
1071 '70.0.3538.112',
1072 '72.0.3613.1',
1073 '72.0.3613.0',
1074 '71.0.3578.59',
1075 '70.0.3538.111',
1076 '72.0.3612.2',
1077 '72.0.3612.1',
1078 '72.0.3612.0',
1079 '70.0.3538.110',
1080 '71.0.3578.58',
1081 '70.0.3538.109',
1082 '72.0.3611.2',
1083 '72.0.3611.1',
1084 '72.0.3611.0',
1085 '71.0.3578.57',
1086 '70.0.3538.108',
1087 '72.0.3610.2',
1088 '71.0.3578.56',
1089 '71.0.3578.55',
1090 '72.0.3610.1',
1091 '72.0.3610.0',
1092 '71.0.3578.54',
1093 '70.0.3538.107',
1094 '71.0.3578.53',
1095 '72.0.3609.3',
1096 '71.0.3578.52',
1097 '72.0.3609.2',
1098 '71.0.3578.51',
1099 '72.0.3608.5',
1100 '72.0.3609.1',
1101 '72.0.3609.0',
1102 '71.0.3578.50',
1103 '70.0.3538.106',
1104 '72.0.3608.4',
1105 '72.0.3608.3',
1106 '72.0.3608.2',
1107 '71.0.3578.49',
1108 '72.0.3608.1',
1109 '72.0.3608.0',
1110 '70.0.3538.105',
1111 '71.0.3578.48',
1112 '72.0.3607.1',
1113 '72.0.3607.0',
1114 '71.0.3578.47',
1115 '70.0.3538.104',
1116 '72.0.3606.2',
1117 '72.0.3606.1',
1118 '72.0.3606.0',
1119 '71.0.3578.46',
1120 '70.0.3538.103',
1121 '70.0.3538.102',
1122 '72.0.3605.3',
1123 '72.0.3605.2',
1124 '72.0.3605.1',
1125 '72.0.3605.0',
1126 '71.0.3578.45',
1127 '70.0.3538.101',
1128 '71.0.3578.44',
1129 '71.0.3578.43',
1130 '70.0.3538.100',
1131 '70.0.3538.99',
1132 '71.0.3578.42',
1133 '72.0.3604.1',
1134 '72.0.3604.0',
1135 '71.0.3578.41',
1136 '70.0.3538.98',
1137 '71.0.3578.40',
1138 '72.0.3603.2',
1139 '72.0.3603.1',
1140 '72.0.3603.0',
1141 '71.0.3578.39',
1142 '70.0.3538.97',
1143 '72.0.3602.2',
1144 '71.0.3578.38',
1145 '71.0.3578.37',
1146 '72.0.3602.1',
1147 '72.0.3602.0',
1148 '71.0.3578.36',
1149 '70.0.3538.96',
1150 '72.0.3601.1',
1151 '72.0.3601.0',
1152 '71.0.3578.35',
1153 '70.0.3538.95',
1154 '72.0.3600.1',
1155 '72.0.3600.0',
1156 '71.0.3578.34',
1157 '70.0.3538.94',
1158 '72.0.3599.3',
1159 '72.0.3599.2',
1160 '72.0.3599.1',
1161 '72.0.3599.0',
1162 '71.0.3578.33',
1163 '70.0.3538.93',
1164 '72.0.3598.1',
1165 '72.0.3598.0',
1166 '71.0.3578.32',
1167 '70.0.3538.87',
1168 '72.0.3597.1',
1169 '72.0.3597.0',
1170 '72.0.3596.2',
1171 '71.0.3578.31',
1172 '70.0.3538.86',
1173 '71.0.3578.30',
1174 '71.0.3578.29',
1175 '72.0.3596.1',
1176 '72.0.3596.0',
1177 '71.0.3578.28',
1178 '70.0.3538.85',
1179 '72.0.3595.2',
1180 '72.0.3591.3',
1181 '72.0.3595.1',
1182 '72.0.3595.0',
1183 '71.0.3578.27',
1184 '70.0.3538.84',
1185 '72.0.3594.1',
1186 '72.0.3594.0',
1187 '71.0.3578.26',
1188 '70.0.3538.83',
1189 '72.0.3593.2',
1190 '72.0.3593.1',
1191 '72.0.3593.0',
1192 '71.0.3578.25',
1193 '70.0.3538.82',
1194 '72.0.3589.3',
1195 '72.0.3592.2',
1196 '72.0.3592.1',
1197 '72.0.3592.0',
1198 '71.0.3578.24',
1199 '72.0.3589.2',
1200 '70.0.3538.81',
1201 '70.0.3538.80',
1202 '72.0.3591.2',
1203 '72.0.3591.1',
1204 '72.0.3591.0',
1205 '71.0.3578.23',
1206 '70.0.3538.79',
1207 '71.0.3578.22',
1208 '72.0.3590.1',
1209 '72.0.3590.0',
1210 '71.0.3578.21',
1211 '70.0.3538.78',
1212 '70.0.3538.77',
1213 '72.0.3589.1',
1214 '72.0.3589.0',
1215 '71.0.3578.20',
1216 '70.0.3538.76',
1217 '71.0.3578.19',
1218 '70.0.3538.75',
1219 '72.0.3588.1',
1220 '72.0.3588.0',
1221 '71.0.3578.18',
1222 '70.0.3538.74',
1223 '72.0.3586.2',
1224 '72.0.3587.0',
1225 '71.0.3578.17',
1226 '70.0.3538.73',
1227 '72.0.3586.1',
1228 '72.0.3586.0',
1229 '71.0.3578.16',
1230 '70.0.3538.72',
1231 '72.0.3585.1',
1232 '72.0.3585.0',
1233 '71.0.3578.15',
1234 '70.0.3538.71',
1235 '71.0.3578.14',
1236 '72.0.3584.1',
1237 '72.0.3584.0',
1238 '71.0.3578.13',
1239 '70.0.3538.70',
1240 '72.0.3583.2',
1241 '71.0.3578.12',
1242 '72.0.3583.1',
1243 '72.0.3583.0',
1244 '71.0.3578.11',
1245 '70.0.3538.69',
1246 '71.0.3578.10',
1247 '72.0.3582.0',
1248 '72.0.3581.4',
1249 '71.0.3578.9',
1250 '70.0.3538.67',
1251 '72.0.3581.3',
1252 '72.0.3581.2',
1253 '72.0.3581.1',
1254 '72.0.3581.0',
1255 '71.0.3578.8',
1256 '70.0.3538.66',
1257 '72.0.3580.1',
1258 '72.0.3580.0',
1259 '71.0.3578.7',
1260 '70.0.3538.65',
1261 '71.0.3578.6',
1262 '72.0.3579.1',
1263 '72.0.3579.0',
1264 '71.0.3578.5',
1265 '70.0.3538.64',
1266 '71.0.3578.4',
1267 '71.0.3578.3',
1268 '71.0.3578.2',
1269 '71.0.3578.1',
1270 '71.0.3578.0',
1271 '70.0.3538.63',
1272 '69.0.3497.128',
1273 '70.0.3538.62',
1274 '70.0.3538.61',
1275 '70.0.3538.60',
1276 '70.0.3538.59',
1277 '71.0.3577.1',
1278 '71.0.3577.0',
1279 '70.0.3538.58',
1280 '69.0.3497.127',
1281 '71.0.3576.2',
1282 '71.0.3576.1',
1283 '71.0.3576.0',
1284 '70.0.3538.57',
1285 '70.0.3538.56',
1286 '71.0.3575.2',
1287 '70.0.3538.55',
1288 '69.0.3497.126',
1289 '70.0.3538.54',
1290 '71.0.3575.1',
1291 '71.0.3575.0',
1292 '71.0.3574.1',
1293 '71.0.3574.0',
1294 '70.0.3538.53',
1295 '69.0.3497.125',
1296 '70.0.3538.52',
1297 '71.0.3573.1',
1298 '71.0.3573.0',
1299 '70.0.3538.51',
1300 '69.0.3497.124',
1301 '71.0.3572.1',
1302 '71.0.3572.0',
1303 '70.0.3538.50',
1304 '69.0.3497.123',
1305 '71.0.3571.2',
1306 '70.0.3538.49',
1307 '69.0.3497.122',
1308 '71.0.3571.1',
1309 '71.0.3571.0',
1310 '70.0.3538.48',
1311 '69.0.3497.121',
1312 '71.0.3570.1',
1313 '71.0.3570.0',
1314 '70.0.3538.47',
1315 '69.0.3497.120',
1316 '71.0.3568.2',
1317 '71.0.3569.1',
1318 '71.0.3569.0',
1319 '70.0.3538.46',
1320 '69.0.3497.119',
1321 '70.0.3538.45',
1322 '71.0.3568.1',
1323 '71.0.3568.0',
1324 '70.0.3538.44',
1325 '69.0.3497.118',
1326 '70.0.3538.43',
1327 '70.0.3538.42',
1328 '71.0.3567.1',
1329 '71.0.3567.0',
1330 '70.0.3538.41',
1331 '69.0.3497.117',
1332 '71.0.3566.1',
1333 '71.0.3566.0',
1334 '70.0.3538.40',
1335 '69.0.3497.116',
1336 '71.0.3565.1',
1337 '71.0.3565.0',
1338 '70.0.3538.39',
1339 '69.0.3497.115',
1340 '71.0.3564.1',
1341 '71.0.3564.0',
1342 '70.0.3538.38',
1343 '69.0.3497.114',
1344 '71.0.3563.0',
1345 '71.0.3562.2',
1346 '70.0.3538.37',
1347 '69.0.3497.113',
1348 '70.0.3538.36',
1349 '70.0.3538.35',
1350 '71.0.3562.1',
1351 '71.0.3562.0',
1352 '70.0.3538.34',
1353 '69.0.3497.112',
1354 '70.0.3538.33',
1355 '71.0.3561.1',
1356 '71.0.3561.0',
1357 '70.0.3538.32',
1358 '69.0.3497.111',
1359 '71.0.3559.6',
1360 '71.0.3560.1',
1361 '71.0.3560.0',
1362 '71.0.3559.5',
1363 '71.0.3559.4',
1364 '70.0.3538.31',
1365 '69.0.3497.110',
1366 '71.0.3559.3',
1367 '70.0.3538.30',
1368 '69.0.3497.109',
1369 '71.0.3559.2',
1370 '71.0.3559.1',
1371 '71.0.3559.0',
1372 '70.0.3538.29',
1373 '69.0.3497.108',
1374 '71.0.3558.2',
1375 '71.0.3558.1',
1376 '71.0.3558.0',
1377 '70.0.3538.28',
1378 '69.0.3497.107',
1379 '71.0.3557.2',
1380 '71.0.3557.1',
1381 '71.0.3557.0',
1382 '70.0.3538.27',
1383 '69.0.3497.106',
1384 '71.0.3554.4',
1385 '70.0.3538.26',
1386 '71.0.3556.1',
1387 '71.0.3556.0',
1388 '70.0.3538.25',
1389 '71.0.3554.3',
1390 '69.0.3497.105',
1391 '71.0.3554.2',
1392 '70.0.3538.24',
1393 '69.0.3497.104',
1394 '71.0.3555.2',
1395 '70.0.3538.23',
1396 '71.0.3555.1',
1397 '71.0.3555.0',
1398 '70.0.3538.22',
1399 '69.0.3497.103',
1400 '71.0.3554.1',
1401 '71.0.3554.0',
1402 '70.0.3538.21',
1403 '69.0.3497.102',
1404 '71.0.3553.3',
1405 '70.0.3538.20',
1406 '69.0.3497.101',
1407 '71.0.3553.2',
1408 '69.0.3497.100',
1409 '71.0.3553.1',
1410 '71.0.3553.0',
1411 '70.0.3538.19',
1412 '69.0.3497.99',
1413 '69.0.3497.98',
1414 '69.0.3497.97',
1415 '71.0.3552.6',
1416 '71.0.3552.5',
1417 '71.0.3552.4',
1418 '71.0.3552.3',
1419 '71.0.3552.2',
1420 '71.0.3552.1',
1421 '71.0.3552.0',
1422 '70.0.3538.18',
1423 '69.0.3497.96',
1424 '71.0.3551.3',
1425 '71.0.3551.2',
1426 '71.0.3551.1',
1427 '71.0.3551.0',
1428 '70.0.3538.17',
1429 '69.0.3497.95',
1430 '71.0.3550.3',
1431 '71.0.3550.2',
1432 '71.0.3550.1',
1433 '71.0.3550.0',
1434 '70.0.3538.16',
1435 '69.0.3497.94',
1436 '71.0.3549.1',
1437 '71.0.3549.0',
1438 '70.0.3538.15',
1439 '69.0.3497.93',
1440 '69.0.3497.92',
1441 '71.0.3548.1',
1442 '71.0.3548.0',
1443 '70.0.3538.14',
1444 '69.0.3497.91',
1445 '71.0.3547.1',
1446 '71.0.3547.0',
1447 '70.0.3538.13',
1448 '69.0.3497.90',
1449 '71.0.3546.2',
1450 '69.0.3497.89',
1451 '71.0.3546.1',
1452 '71.0.3546.0',
1453 '70.0.3538.12',
1454 '69.0.3497.88',
1455 '71.0.3545.4',
1456 '71.0.3545.3',
1457 '71.0.3545.2',
1458 '71.0.3545.1',
1459 '71.0.3545.0',
1460 '70.0.3538.11',
1461 '69.0.3497.87',
1462 '71.0.3544.5',
1463 '71.0.3544.4',
1464 '71.0.3544.3',
1465 '71.0.3544.2',
1466 '71.0.3544.1',
1467 '71.0.3544.0',
1468 '69.0.3497.86',
1469 '70.0.3538.10',
1470 '69.0.3497.85',
1471 '70.0.3538.9',
1472 '69.0.3497.84',
1473 '71.0.3543.4',
1474 '70.0.3538.8',
1475 '71.0.3543.3',
1476 '71.0.3543.2',
1477 '71.0.3543.1',
1478 '71.0.3543.0',
1479 '70.0.3538.7',
1480 '69.0.3497.83',
1481 '71.0.3542.2',
1482 '71.0.3542.1',
1483 '71.0.3542.0',
1484 '70.0.3538.6',
1485 '69.0.3497.82',
1486 '69.0.3497.81',
1487 '71.0.3541.1',
1488 '71.0.3541.0',
1489 '70.0.3538.5',
1490 '69.0.3497.80',
1491 '71.0.3540.1',
1492 '71.0.3540.0',
1493 '70.0.3538.4',
1494 '69.0.3497.79',
1495 '70.0.3538.3',
1496 '71.0.3539.1',
1497 '71.0.3539.0',
1498 '69.0.3497.78',
1499 '68.0.3440.134',
1500 '69.0.3497.77',
1501 '70.0.3538.2',
1502 '70.0.3538.1',
1503 '70.0.3538.0',
1504 '69.0.3497.76',
1505 '68.0.3440.133',
1506 '69.0.3497.75',
1507 '70.0.3537.2',
1508 '70.0.3537.1',
1509 '70.0.3537.0',
1510 '69.0.3497.74',
1511 '68.0.3440.132',
1512 '70.0.3536.0',
1513 '70.0.3535.5',
1514 '70.0.3535.4',
1515 '70.0.3535.3',
1516 '69.0.3497.73',
1517 '68.0.3440.131',
1518 '70.0.3532.8',
1519 '70.0.3532.7',
1520 '69.0.3497.72',
1521 '69.0.3497.71',
1522 '70.0.3535.2',
1523 '70.0.3535.1',
1524 '70.0.3535.0',
1525 '69.0.3497.70',
1526 '68.0.3440.130',
1527 '69.0.3497.69',
1528 '68.0.3440.129',
1529 '70.0.3534.4',
1530 '70.0.3534.3',
1531 '70.0.3534.2',
1532 '70.0.3534.1',
1533 '70.0.3534.0',
1534 '69.0.3497.68',
1535 '68.0.3440.128',
1536 '70.0.3533.2',
1537 '70.0.3533.1',
1538 '70.0.3533.0',
1539 '69.0.3497.67',
1540 '68.0.3440.127',
1541 '70.0.3532.6',
1542 '70.0.3532.5',
1543 '70.0.3532.4',
1544 '69.0.3497.66',
1545 '68.0.3440.126',
1546 '70.0.3532.3',
1547 '70.0.3532.2',
1548 '70.0.3532.1',
1549 '69.0.3497.60',
1550 '69.0.3497.65',
1551 '69.0.3497.64',
1552 '70.0.3532.0',
1553 '70.0.3531.0',
1554 '70.0.3530.4',
1555 '70.0.3530.3',
1556 '70.0.3530.2',
1557 '69.0.3497.58',
1558 '68.0.3440.125',
1559 '69.0.3497.57',
1560 '69.0.3497.56',
1561 '69.0.3497.55',
1562 '69.0.3497.54',
1563 '70.0.3530.1',
1564 '70.0.3530.0',
1565 '69.0.3497.53',
1566 '68.0.3440.124',
1567 '69.0.3497.52',
1568 '70.0.3529.3',
1569 '70.0.3529.2',
1570 '70.0.3529.1',
1571 '70.0.3529.0',
1572 '69.0.3497.51',
1573 '70.0.3528.4',
1574 '68.0.3440.123',
1575 '70.0.3528.3',
1576 '70.0.3528.2',
1577 '70.0.3528.1',
1578 '70.0.3528.0',
1579 '69.0.3497.50',
1580 '68.0.3440.122',
1581 '70.0.3527.1',
1582 '70.0.3527.0',
1583 '69.0.3497.49',
1584 '68.0.3440.121',
1585 '70.0.3526.1',
1586 '70.0.3526.0',
1587 '68.0.3440.120',
1588 '69.0.3497.48',
1589 '69.0.3497.47',
1590 '68.0.3440.119',
1591 '68.0.3440.118',
1592 '70.0.3525.5',
1593 '70.0.3525.4',
1594 '70.0.3525.3',
1595 '68.0.3440.117',
1596 '69.0.3497.46',
1597 '70.0.3525.2',
1598 '70.0.3525.1',
1599 '70.0.3525.0',
1600 '69.0.3497.45',
1601 '68.0.3440.116',
1602 '70.0.3524.4',
1603 '70.0.3524.3',
1604 '69.0.3497.44',
1605 '70.0.3524.2',
1606 '70.0.3524.1',
1607 '70.0.3524.0',
1608 '70.0.3523.2',
1609 '69.0.3497.43',
1610 '68.0.3440.115',
1611 '70.0.3505.9',
1612 '69.0.3497.42',
1613 '70.0.3505.8',
1614 '70.0.3523.1',
1615 '70.0.3523.0',
1616 '69.0.3497.41',
1617 '68.0.3440.114',
1618 '70.0.3505.7',
1619 '69.0.3497.40',
1620 '70.0.3522.1',
1621 '70.0.3522.0',
1622 '70.0.3521.2',
1623 '69.0.3497.39',
1624 '68.0.3440.113',
1625 '70.0.3505.6',
1626 '70.0.3521.1',
1627 '70.0.3521.0',
1628 '69.0.3497.38',
1629 '68.0.3440.112',
1630 '70.0.3520.1',
1631 '70.0.3520.0',
1632 '69.0.3497.37',
1633 '68.0.3440.111',
1634 '70.0.3519.3',
1635 '70.0.3519.2',
1636 '70.0.3519.1',
1637 '70.0.3519.0',
1638 '69.0.3497.36',
1639 '68.0.3440.110',
1640 '70.0.3518.1',
1641 '70.0.3518.0',
1642 '69.0.3497.35',
1643 '69.0.3497.34',
1644 '68.0.3440.109',
1645 '70.0.3517.1',
1646 '70.0.3517.0',
1647 '69.0.3497.33',
1648 '68.0.3440.108',
1649 '69.0.3497.32',
1650 '70.0.3516.3',
1651 '70.0.3516.2',
1652 '70.0.3516.1',
1653 '70.0.3516.0',
1654 '69.0.3497.31',
1655 '68.0.3440.107',
1656 '70.0.3515.4',
1657 '68.0.3440.106',
1658 '70.0.3515.3',
1659 '70.0.3515.2',
1660 '70.0.3515.1',
1661 '70.0.3515.0',
1662 '69.0.3497.30',
1663 '68.0.3440.105',
1664 '68.0.3440.104',
1665 '70.0.3514.2',
1666 '70.0.3514.1',
1667 '70.0.3514.0',
1668 '69.0.3497.29',
1669 '68.0.3440.103',
1670 '70.0.3513.1',
1671 '70.0.3513.0',
1672 '69.0.3497.28',
    )
    return _USER_AGENT_TPL % random.choice(_CHROME_VERSIONS)


std_headers = {
    'User-Agent': random_user_agent(),
    'Accept-Charset': 'ISO-8859-1,utf-8;q=0.7,*;q=0.7',
    'Accept': 'text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8',
    'Accept-Encoding': 'gzip, deflate',
    'Accept-Language': 'en-us,en;q=0.5',
}


USER_AGENTS = {
    'Safari': 'Mozilla/5.0 (X11; Linux x86_64; rv:10.0) AppleWebKit/533.20.25 (KHTML, like Gecko) Version/5.0.4 Safari/533.20.27',
}


NO_DEFAULT = object()

ENGLISH_MONTH_NAMES = [
    'January', 'February', 'March', 'April', 'May', 'June',
    'July', 'August', 'September', 'October', 'November', 'December']

MONTH_NAMES = {
    'en': ENGLISH_MONTH_NAMES,
    'fr': [
        'janvier', 'février', 'mars', 'avril', 'mai', 'juin',
        'juillet', 'août', 'septembre', 'octobre', 'novembre', 'décembre'],
}

KNOWN_EXTENSIONS = (
    'mp4', 'm4a', 'm4p', 'm4b', 'm4r', 'm4v', 'aac',
    'flv', 'f4v', 'f4a', 'f4b',
    'webm', 'ogg', 'ogv', 'oga', 'ogx', 'spx', 'opus',
    'mkv', 'mka', 'mk3d',
    'avi', 'divx',
    'mov',
    'asf', 'wmv', 'wma',
    '3gp', '3g2',
    'mp3',
    'flac',
    'ape',
    'wav',
    'f4f', 'f4m', 'm3u8', 'smil')

# needed for sanitizing filenames in restricted mode
ACCENT_CHARS = dict(zip('ÂÃÄÀÁÅÆÇÈÉÊËÌÍÎÏÐÑÒÓÔÕÖŐØŒÙÚÛÜŰÝÞßàáâãäåæçèéêëìíîïðñòóôõöőøœùúûüűýþÿ',
                        itertools.chain('AAAAAA', ['AE'], 'CEEEEIIIIDNOOOOOOO', ['OE'], 'UUUUUY', ['TH', 'ss'],
                                        'aaaaaa', ['ae'], 'ceeeeiiiionooooooo', ['oe'], 'uuuuuy', ['th'], 'y')))

DATE_FORMATS = (
    '%d %B %Y',
    '%d %b %Y',
    '%B %d %Y',
    '%B %dst %Y',
    '%B %dnd %Y',
    '%B %drd %Y',
    '%B %dth %Y',
    '%b %d %Y',
    '%b %dst %Y',
    '%b %dnd %Y',
    '%b %drd %Y',
    '%b %dth %Y',
    '%b %dst %Y %I:%M',
    '%b %dnd %Y %I:%M',
    '%b %drd %Y %I:%M',
    '%b %dth %Y %I:%M',
    '%Y %m %d',
    '%Y-%m-%d',
    '%Y/%m/%d',
    '%Y/%m/%d %H:%M',
    '%Y/%m/%d %H:%M:%S',
    '%Y%m%d%H%M',
    '%Y%m%d%H%M%S',
    '%Y-%m-%d %H:%M',
    '%Y-%m-%d %H:%M:%S',
    '%Y-%m-%d %H:%M:%S.%f',
    '%Y-%m-%d %H:%M:%S:%f',
    '%d.%m.%Y %H:%M',
    '%d.%m.%Y %H.%M',
    '%Y-%m-%dT%H:%M:%SZ',
    '%Y-%m-%dT%H:%M:%S.%fZ',
    '%Y-%m-%dT%H:%M:%S.%f0Z',
    '%Y-%m-%dT%H:%M:%S',
    '%Y-%m-%dT%H:%M:%S.%f',
    '%Y-%m-%dT%H:%M',
    '%b %d %Y at %H:%M',
    '%b %d %Y at %H:%M:%S',
    '%B %d %Y at %H:%M',
    '%B %d %Y at %H:%M:%S',
)

DATE_FORMATS_DAY_FIRST = list(DATE_FORMATS)
DATE_FORMATS_DAY_FIRST.extend([
    '%d-%m-%Y',
    '%d.%m.%Y',
    '%d.%m.%y',
    '%d/%m/%Y',
    '%d/%m/%y',
    '%d/%m/%Y %H:%M:%S',
])

DATE_FORMATS_MONTH_FIRST = list(DATE_FORMATS)
DATE_FORMATS_MONTH_FIRST.extend([
    '%m-%d-%Y',
    '%m.%d.%Y',
    '%m/%d/%Y',
    '%m/%d/%y',
    '%m/%d/%Y %H:%M:%S',
])

PACKED_CODES_RE = r"}\('(.+)',(\d+),(\d+),'([^']+)'\.split\('\|'\)"
JSON_LD_RE = r'(?is)<script[^>]+type=(["\']?)application/ld\+json\1[^>]*>(?P<json_ld>.+?)</script>'


def preferredencoding():
    """Get preferred encoding.

    Returns the best encoding scheme for the system, based on
    locale.getpreferredencoding() and some further tweaks.
    """
    try:
        pref = locale.getpreferredencoding()
        'TEST'.encode(pref)
    except Exception:
        pref = 'UTF-8'

    return pref


def write_json_file(obj, fn):
    """ Encode obj as JSON and write it to fn, atomically if possible """

    fn = encodeFilename(fn)
    if sys.version_info < (3, 0) and sys.platform != 'win32':
        encoding = get_filesystem_encoding()
        # os.path.basename returns a bytes object, but NamedTemporaryFile
        # will fail if the filename contains non ascii characters unless we
        # use a unicode object
        path_basename = lambda f: os.path.basename(fn).decode(encoding)
        # the same for os.path.dirname
        path_dirname = lambda f: os.path.dirname(fn).decode(encoding)
    else:
        path_basename = os.path.basename
        path_dirname = os.path.dirname

    args = {
        'suffix': '.tmp',
        'prefix': path_basename(fn) + '.',
        'dir': path_dirname(fn),
        'delete': False,
    }

    # In Python 2.x, json.dump expects a bytestream.
    # In Python 3.x, it writes to a character stream
    if sys.version_info < (3, 0):
        args['mode'] = 'wb'
    else:
        args.update({
            'mode': 'w',
            'encoding': 'utf-8',
        })

    tf = tempfile.NamedTemporaryFile(**compat_kwargs(args))

    try:
        with tf:
            json.dump(obj, tf)
        if sys.platform == 'win32':
            # Need to remove existing file on Windows, else os.rename raises
            # WindowsError or FileExistsError.
            try:
                os.unlink(fn)
            except OSError:
                pass
        try:
            mask = os.umask(0)
            os.umask(mask)
            os.chmod(tf.name, 0o666 & ~mask)
        except OSError:
            pass
        os.rename(tf.name, fn)
    except Exception:
        try:
            os.remove(tf.name)
        except OSError:
            pass
        raise
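# Illustrative usage sketch: the write-to-temp-file-then-rename dance above
# makes the update atomic, so a reader never sees a half-written file, e.g.
#   write_json_file({'id': 'abc', 'title': 'test'}, 'info.json')
# either leaves an existing 'info.json' untouched or replaces it wholesale.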


if sys.version_info >= (2, 7):
    def find_xpath_attr(node, xpath, key, val=None):
        """ Find the xpath xpath[@key=val] """
        assert re.match(r'^[a-zA-Z_-]+$', key)
        expr = xpath + ('[@%s]' % key if val is None else "[@%s='%s']" % (key, val))
        return node.find(expr)
else:
    def find_xpath_attr(node, xpath, key, val=None):
        for f in node.findall(compat_xpath(xpath)):
            if key not in f.attrib:
                continue
            if val is None or f.attrib.get(key) == val:
                return f
        return None

# On python2.6 the xml.etree.ElementTree.Element methods don't support
# the namespace parameter


def xpath_with_ns(path, ns_map):
    components = [c.split(':') for c in path.split('/')]
    replaced = []
    for c in components:
        if len(c) == 1:
            replaced.append(c[0])
        else:
            ns, tag = c
            replaced.append('{%s}%s' % (ns_map[ns], tag))
    return '/'.join(replaced)


def xpath_element(node, xpath, name=None, fatal=False, default=NO_DEFAULT):
    def _find_xpath(xpath):
        return node.find(compat_xpath(xpath))

    if isinstance(xpath, (str, compat_str)):
        n = _find_xpath(xpath)
    else:
        for xp in xpath:
            n = _find_xpath(xp)
            if n is not None:
                break

    if n is None:
        if default is not NO_DEFAULT:
            return default
        elif fatal:
            name = xpath if name is None else name
            raise ExtractorError('Could not find XML element %s' % name)
        else:
            return None
    return n


def xpath_text(node, xpath, name=None, fatal=False, default=NO_DEFAULT):
    n = xpath_element(node, xpath, name, fatal=fatal, default=default)
    if n is None or n == default:
        return n
    if n.text is None:
        if default is not NO_DEFAULT:
            return default
        elif fatal:
            name = xpath if name is None else name
            raise ExtractorError('Could not find XML element\'s text %s' % name)
        else:
            return None
    return n.text


def xpath_attr(node, xpath, key, name=None, fatal=False, default=NO_DEFAULT):
    n = find_xpath_attr(node, xpath, key)
    if n is None:
        if default is not NO_DEFAULT:
            return default
        elif fatal:
            name = '%s[@%s]' % (xpath, key) if name is None else name
            raise ExtractorError('Could not find XML attribute %s' % name)
        else:
            return None
    return n.attrib[key]


def get_element_by_id(id, html):
    """Return the content of the tag with the specified ID in the passed HTML document"""
    return get_element_by_attribute('id', id, html)


def get_element_by_class(class_name, html):
    """Return the content of the first tag with the specified class in the passed HTML document"""
    retval = get_elements_by_class(class_name, html)
    return retval[0] if retval else None


def get_element_by_attribute(attribute, value, html, escape_value=True):
    retval = get_elements_by_attribute(attribute, value, html, escape_value)
    return retval[0] if retval else None


def get_elements_by_class(class_name, html):
    """Return the content of all tags with the specified class in the passed HTML document as a list"""
    return get_elements_by_attribute(
        'class', r'[^\'"]*\b%s\b[^\'"]*' % re.escape(class_name),
        html, escape_value=False)


def get_elements_by_attribute(attribute, value, html, escape_value=True):
    """Return the content of the tag with the specified attribute in the passed HTML document"""

    value = re.escape(value) if escape_value else value

    retlist = []
    for m in re.finditer(r'''(?xs)
        <([a-zA-Z0-9:._-]+)
         (?:\s+[a-zA-Z0-9:._-]+(?:=[a-zA-Z0-9:._-]*|="[^"]*"|='[^']*'|))*?
         \s+%s=['"]?%s['"]?
        (?:\s+[a-zA-Z0-9:._-]+(?:=[a-zA-Z0-9:._-]*|="[^"]*"|='[^']*'|))*?
        \s*>
        (?P<content>.*?)
        </\1>
    ''' % (re.escape(attribute), value), html):
        res = m.group('content')

        if res.startswith('"') or res.startswith("'"):
            res = res[1:-1]

        retlist.append(unescapeHTML(res))

    return retlist
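# Illustrative doctest-style sketch (made-up markup):
#   >>> get_element_by_class('title', '<span class="video title">Foo</span>')
#   'Foo'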


class HTMLAttributeParser(compat_HTMLParser):
    """Trivial HTML parser to gather the attributes for a single element"""

    def __init__(self):
        self.attrs = {}
        compat_HTMLParser.__init__(self)

    def handle_starttag(self, tag, attrs):
        self.attrs = dict(attrs)


def extract_attributes(html_element):
    """Given a string for an HTML element such as
    <el
         a="foo" B="bar" c="&98;az" d=boz
         empty= noval entity="&amp;"
         sq='"' dq="'"
    >
    Decode and return a dictionary of attributes.
    {
        'a': 'foo', 'b': 'bar', c: 'baz', d: 'boz',
        'empty': '', 'noval': None, 'entity': '&',
        'sq': '"', 'dq': '\''
    }.
    NB HTMLParser is stricter in Python 2.6 & 3.2 than in later versions,
    but the cases in the unit test will work for all of 2.6, 2.7, 3.2-3.5.
    """
    parser = HTMLAttributeParser()
    try:
        parser.feed(html_element)
        parser.close()
    # Older Python may throw HTMLParseError in case of malformed HTML
    except compat_HTMLParseError:
        pass
    return parser.attrs
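# Illustrative doctest-style sketch (made-up element, mirroring the docstring):
#   >>> extract_attributes('<a href="/watch?v=abc" class="yt-link" data-id=42>')
#   {'href': '/watch?v=abc', 'class': 'yt-link', 'data-id': '42'}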


def clean_html(html):
    """Clean an HTML snippet into a readable string"""

    if html is None:  # Convenience for sanitizing descriptions etc.
        return html

    # Newline vs <br />
    html = html.replace('\n', ' ')
    html = re.sub(r'(?u)\s*<\s*br\s*/?\s*>\s*', '\n', html)
    html = re.sub(r'(?u)<\s*/\s*p\s*>\s*<\s*p[^>]*>', '\n', html)
    # Strip html tags
    html = re.sub('<.*?>', '', html)
    # Replace html entities
    html = unescapeHTML(html)
    return html.strip()


def sanitize_open(filename, open_mode):
    """Try to open the given filename, and slightly tweak it if this fails.

    Attempts to open the given filename. If this fails, it tries to change
    the filename slightly, step by step, until it's either able to open it
    or it fails and raises a final exception, like the standard open()
    function.

    It returns the tuple (stream, definitive_file_name).
    """
    try:
        if filename == '-':
            if sys.platform == 'win32':
                import msvcrt
                msvcrt.setmode(sys.stdout.fileno(), os.O_BINARY)
            return (sys.stdout.buffer if hasattr(sys.stdout, 'buffer') else sys.stdout, filename)
        stream = open(encodeFilename(filename), open_mode)
        return (stream, filename)
    except (IOError, OSError) as err:
        if err.errno in (errno.EACCES,):
            raise

        # In case of error, try to remove win32 forbidden chars
        alt_filename = sanitize_path(filename)
        if alt_filename == filename:
            raise
        else:
            # An exception here should be caught in the caller
            stream = open(encodeFilename(alt_filename), open_mode)
            return (stream, alt_filename)


def timeconvert(timestr):
    """Convert RFC 2822 defined time string into system timestamp"""
    timestamp = None
    timetuple = email.utils.parsedate_tz(timestr)
    if timetuple is not None:
        timestamp = email.utils.mktime_tz(timetuple)
    return timestamp


def sanitize_filename(s, restricted=False, is_id=False):
    """Sanitizes a string so it could be used as part of a filename.
    If restricted is set, use a stricter subset of allowed characters.
    Set is_id if this is not an arbitrary string, but an ID that should be kept
    if possible.
    """
    def replace_insane(char):
        if restricted and char in ACCENT_CHARS:
            return ACCENT_CHARS[char]
        if char == '?' or ord(char) < 32 or ord(char) == 127:
            return ''
        elif char == '"':
            return '' if restricted else '\''
        elif char == ':':
            return '_-' if restricted else ' -'
        elif char in '\\/|*<>':
            return '_'
        if restricted and (char in '!&\'()[]{}$;`^,#' or char.isspace()):
            return '_'
        if restricted and ord(char) > 127:
            return '_'
        return char

    if s == '':
        return ''
    # Handle timestamps
    s = re.sub(r'[0-9]+(?::[0-9]+)+', lambda m: m.group(0).replace(':', '_'), s)
    result = ''.join(map(replace_insane, s))
    if not is_id:
        while '__' in result:
            result = result.replace('__', '_')
        result = result.strip('_')
        # Common case of "Foreign band name - English song title"
        if restricted and result.startswith('-_'):
            result = result[2:]
        if result.startswith('-'):
            result = '_' + result[len('-'):]
        result = result.lstrip('.')
        if not result:
            result = '_'
    return result
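# Illustrative doctest-style sketch (made-up titles):
#   >>> sanitize_filename('Foo: Bar/Baz')
#   'Foo - Bar_Baz'
#   >>> sanitize_filename('Déjà vu: 1/2', restricted=True)
#   'Deja_vu_-_1_2'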


def sanitize_path(s, force=False):
    """Sanitizes and normalizes path on Windows"""
    if sys.platform == 'win32':
        force = False
        drive_or_unc, _ = os.path.splitdrive(s)
        if sys.version_info < (2, 7) and not drive_or_unc:
            drive_or_unc, _ = os.path.splitunc(s)
    elif force:
        drive_or_unc = ''
    else:
        return s

    norm_path = os.path.normpath(remove_start(s, drive_or_unc)).split(os.path.sep)
    if drive_or_unc:
        norm_path.pop(0)
    sanitized_path = [
        path_part if path_part in ['.', '..'] else re.sub(r'(?:[/<>:"\|\\?\*]|[\s.]$)', '#', path_part)
        for path_part in norm_path]
    if drive_or_unc:
        sanitized_path.insert(0, drive_or_unc + os.path.sep)
    elif force and s[0] == os.path.sep:
        sanitized_path.insert(0, os.path.sep)
    return os.path.join(*sanitized_path)
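# Illustrative sketch (Windows-only behaviour, made-up path): characters that
# are forbidden inside a path component are replaced with '#', e.g.
#   sanitize_path(r'C:\videos\part 1: intro?.mp4')
# would yield r'C:\videos\part 1# intro#.mp4'; on other platforms the input
# is returned unchanged unless force=True.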


def sanitize_url(url):
    # Prepend protocol-less URLs with `http:` scheme in order to mitigate
    # the number of unwanted failures due to missing protocol
    if url.startswith('//'):
        return 'http:%s' % url
    # Fix some common typos seen so far
    COMMON_TYPOS = (
        # https://github.com/ytdl-org/youtube-dl/issues/15649
        (r'^httpss://', r'https://'),
        # https://bx1.be/lives/direct-tv/
        (r'^rmtp([es]?)://', r'rtmp\1://'),
    )
    for mistake, fixup in COMMON_TYPOS:
        if re.match(mistake, url):
            return re.sub(mistake, fixup, url)
    return url
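# Illustrative doctest-style sketch:
#   >>> sanitize_url('//example.com/watch')
#   'http://example.com/watch'
#   >>> sanitize_url('httpss://example.com/watch')
#   'https://example.com/watch'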


def extract_basic_auth(url):
    parts = compat_urlparse.urlsplit(url)
    if parts.username is None:
        return url, None
    url = compat_urlparse.urlunsplit(parts._replace(netloc=(
        parts.hostname if parts.port is None
        else '%s:%d' % (parts.hostname, parts.port))))
    auth_payload = base64.b64encode(
        ('%s:%s' % (parts.username, parts.password or '')).encode('utf-8'))
    return url, 'Basic ' + auth_payload.decode('utf-8')
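# Illustrative doctest-style sketch ('dXNlcjpwYXNz' is base64 of 'user:pass'):
#   >>> extract_basic_auth('http://user:pass@example.com/feed')
#   ('http://example.com/feed', 'Basic dXNlcjpwYXNz')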


def sanitized_Request(url, *args, **kwargs):
    url, auth_header = extract_basic_auth(escape_url(sanitize_url(url)))
    if auth_header is not None:
        headers = args[1] if len(args) >= 2 else kwargs.setdefault('headers', {})
        headers['Authorization'] = auth_header
    return compat_urllib_request.Request(url, *args, **kwargs)


def expand_path(s):
    """Expand shell variables and ~"""
    return os.path.expandvars(compat_expanduser(s))


def orderedSet(iterable):
    """ Remove all duplicates from the input iterable """
    res = []
    for el in iterable:
        if el not in res:
            res.append(el)
    return res
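# Illustrative doctest-style sketch: unlike set(), insertion order is kept:
#   >>> orderedSet(['720p', '1080p', '720p', '480p'])
#   ['720p', '1080p', '480p']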


def _htmlentity_transform(entity_with_semicolon):
    """Transforms an HTML entity to a character."""
    entity = entity_with_semicolon[:-1]

    # Known non-numeric HTML entity
    if entity in compat_html_entities.name2codepoint:
        return compat_chr(compat_html_entities.name2codepoint[entity])

    # TODO: HTML5 allows entities without a semicolon. For example,
    # '&Eacuteric' should be decoded as 'Éric'.
    if entity_with_semicolon in compat_html_entities_html5:
        return compat_html_entities_html5[entity_with_semicolon]

    mobj = re.match(r'#(x[0-9a-fA-F]+|[0-9]+)', entity)
    if mobj is not None:
        numstr = mobj.group(1)
        if numstr.startswith('x'):
            base = 16
            numstr = '0%s' % numstr
        else:
            base = 10
        # See https://github.com/ytdl-org/youtube-dl/issues/7518
        try:
            return compat_chr(int(numstr, base))
        except ValueError:
            pass

    # Unknown entity in name, return its literal representation
    return '&%s;' % entity


def unescapeHTML(s):
    if s is None:
        return None
    assert type(s) == compat_str

    return re.sub(
        r'&([^&;]+;)', lambda m: _htmlentity_transform(m.group(1)), s)
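# Illustrative doctest-style sketch covering a named and a numeric entity:
#   >>> unescapeHTML('Caf&eacute; &#38; more')
#   'Café & more'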


def escapeHTML(text):
    return (
        text
        .replace('&', '&amp;')
        .replace('<', '&lt;')
        .replace('>', '&gt;')
        .replace('"', '&quot;')
        .replace("'", '&#39;')
    )


def process_communicate_or_kill(p, *args, **kwargs):
    try:
        return p.communicate(*args, **kwargs)
    except BaseException:  # Including KeyboardInterrupt
        p.kill()
        p.wait()
        raise


def get_subprocess_encoding():
    if sys.platform == 'win32' and sys.getwindowsversion()[0] >= 5:
        # For subprocess calls, encode with locale encoding
        # Refer to http://stackoverflow.com/a/9951851/35070
        encoding = preferredencoding()
    else:
        encoding = sys.getfilesystemencoding()
    if encoding is None:
        encoding = 'utf-8'
    return encoding


def encodeFilename(s, for_subprocess=False):
    """
    @param s The name of the file
    """

    assert type(s) == compat_str

    # Python 3 has a Unicode API
    if sys.version_info >= (3, 0):
        return s

    # Pass '' directly to use Unicode APIs on Windows 2000 and up
    # (Detecting Windows NT 4 is tricky because 'major >= 4' would
    # match Windows 9x series as well. Besides, NT 4 is obsolete.)
    if not for_subprocess and sys.platform == 'win32' and sys.getwindowsversion()[0] >= 5:
        return s

    # Jython assumes filenames are Unicode strings though reported as Python 2.x compatible
    if sys.platform.startswith('java'):
        return s

    return s.encode(get_subprocess_encoding(), 'ignore')


def decodeFilename(b, for_subprocess=False):

    if sys.version_info >= (3, 0):
        return b

    if not isinstance(b, bytes):
        return b

    return b.decode(get_subprocess_encoding(), 'ignore')


def encodeArgument(s):
    if not isinstance(s, compat_str):
        # Legacy code that uses byte strings
        # Uncomment the following line after fixing all post processors
        # assert False, 'Internal error: %r should be of type %r, is %r' % (s, compat_str, type(s))
        s = s.decode('ascii')
    return encodeFilename(s, True)


def decodeArgument(b):
    return decodeFilename(b, True)


def decodeOption(optval):
    if optval is None:
        return optval
    if isinstance(optval, bytes):
        optval = optval.decode(preferredencoding())

    assert isinstance(optval, compat_str)
    return optval


def formatSeconds(secs, delim=':', msec=False):
    if secs > 3600:
        ret = '%d%s%02d%s%02d' % (secs // 3600, delim, (secs % 3600) // 60, delim, secs % 60)
    elif secs > 60:
        ret = '%d%s%02d' % (secs // 60, delim, secs % 60)
    else:
        ret = '%d' % secs
    return '%s.%03d' % (ret, secs % 1) if msec else ret
4539dd30 2347
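# A quick sketch of what formatSeconds() produces:
#   >>> formatSeconds(3725)
#   '1:02:05'
#   >>> formatSeconds(75, delim='_')
#   '1_15'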
a0ddb8a2 2348
be4a824d
PH
2349def make_HTTPS_handler(params, **kwargs):
2350 opts_no_check_certificate = params.get('nocheckcertificate', False)
0db261ba 2351 if hasattr(ssl, 'create_default_context'): # Python >= 3.4 or 2.7.9
be5f2c19 2352 context = ssl.create_default_context(ssl.Purpose.SERVER_AUTH)
0db261ba 2353 if opts_no_check_certificate:
be5f2c19 2354 context.check_hostname = False
0db261ba 2355 context.verify_mode = ssl.CERT_NONE
a2366922 2356 try:
be4a824d 2357 return YoutubeDLHTTPSHandler(params, context=context, **kwargs)
a2366922
PH
2358 except TypeError:
2359 # Python 2.7.8
2360 # (create_default_context present but HTTPSHandler has no context=)
2361 pass
2362
2363 if sys.version_info < (3, 2):
d7932313 2364 return YoutubeDLHTTPSHandler(params, **kwargs)
aa37e3d4 2365 else: # Python < 3.4
d7932313 2366 context = ssl.SSLContext(ssl.PROTOCOL_TLSv1)
ea6d901e 2367 context.verify_mode = (ssl.CERT_NONE
dca08720 2368 if opts_no_check_certificate
ea6d901e 2369 else ssl.CERT_REQUIRED)
303b479e 2370 context.set_default_verify_paths()
be4a824d 2371 return YoutubeDLHTTPSHandler(params, context=context, **kwargs)
ea6d901e 2372
732ea2f0 2373
5873d4cc 2374def bug_reports_message(before=';'):
08f2a92c 2375 if ytdl_is_updateable():
7a5c1cfe 2376 update_cmd = 'type yt-dlp -U to update'
08f2a92c 2377 else:
7a5c1cfe 2378 update_cmd = 'see https://github.com/yt-dlp/yt-dlp on how to update'
5873d4cc 2379 msg = 'please report this issue on https://github.com/yt-dlp/yt-dlp .'
08f2a92c 2380 msg += ' Make sure you are using the latest version; %s.' % update_cmd
7a5c1cfe 2381 msg += ' Be sure to call yt-dlp with the --verbose flag and include its complete output.'
5873d4cc
F
2382
2383 before = before.rstrip()
2384 if not before or before.endswith(('.', '!', '?')):
2385 msg = msg[0].title() + msg[1:]
2386
2387 return (before + ' ' if before else '') + msg
08f2a92c
JMF
2388
2389
bf5b9d85
PM
2390class YoutubeDLError(Exception):
2391 """Base exception for YoutubeDL errors."""
2392 pass
2393
2394
3158150c 2395network_exceptions = [compat_urllib_error.URLError, compat_http_client.HTTPException, socket.error]
2396if hasattr(ssl, 'CertificateError'):
2397 network_exceptions.append(ssl.CertificateError)
2398network_exceptions = tuple(network_exceptions)
2399
2400
bf5b9d85 2401class ExtractorError(YoutubeDLError):
1c256f70 2402 """Error during info extraction."""
5f6a1245 2403
1151c407 2404 def __init__(self, msg, tb=None, expected=False, cause=None, video_id=None, ie=None):
9a82b238 2405 """ tb, if given, is the original traceback (so that it can be printed out).
7a5c1cfe 2406 If expected is set, this is a normal error message and most likely not a bug in yt-dlp.
9a82b238 2407 """
3158150c 2408 if sys.exc_info()[0] in network_exceptions:
9a82b238 2409 expected = True
d5979c5d 2410
526d74ec 2411 self.msg = str(msg)
1c256f70 2412 self.traceback = tb
1151c407 2413 self.expected = expected
2eabb802 2414 self.cause = cause
d11271dd 2415 self.video_id = video_id
1151c407 2416 self.ie = ie
2417 self.exc_info = sys.exc_info() # preserve original exception
2418
2419 super(ExtractorError, self).__init__(''.join((
2420 format_field(ie, template='[%s] '),
2421 format_field(video_id, template='%s: '),
526d74ec 2422 self.msg,
1151c407 2423 format_field(cause, template=' (caused by %r)'),
2424 '' if expected else bug_reports_message())))
1c256f70 2425
01951dda
PH
2426 def format_traceback(self):
2427 if self.traceback is None:
2428 return None
28e614de 2429 return ''.join(traceback.format_tb(self.traceback))
01951dda 2430
1c256f70 2431
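# Illustrative usage from a hypothetical extractor (the message and video_id are
# made-up values):
#   raise ExtractorError('Unable to extract video data', video_id='abc123', expected=True)
# With expected=True the bug_reports_message() footer is not appended to the message.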
416c7fcb
PH
2432class UnsupportedError(ExtractorError):
2433 def __init__(self, url):
2434 super(UnsupportedError, self).__init__(
2435 'Unsupported URL: %s' % url, expected=True)
2436 self.url = url
2437
2438
55b3e45b
JMF
2439class RegexNotFoundError(ExtractorError):
2440 """Error when a regex didn't match"""
2441 pass
2442
2443
773f291d
S
2444class GeoRestrictedError(ExtractorError):
2445 """Geographic restriction Error exception.
2446
2447 This exception may be thrown when a video is not available from your
2448 geographic location due to geographic restrictions imposed by a website.
2449 """
b6e0c7d2 2450
773f291d
S
2451 def __init__(self, msg, countries=None):
2452 super(GeoRestrictedError, self).__init__(msg, expected=True)
2453 self.msg = msg
2454 self.countries = countries
2455
2456
bf5b9d85 2457class DownloadError(YoutubeDLError):
59ae15a5 2458 """Download Error exception.
d77c3dfd 2459
59ae15a5
PH
2460 This exception may be thrown by FileDownloader objects if they are not
2461 configured to continue on errors. They will contain the appropriate
2462 error message.
2463 """
5f6a1245 2464
8cc83b8d
FV
2465 def __init__(self, msg, exc_info=None):
2466 """ exc_info, if given, is the original exception that caused the trouble (as returned by sys.exc_info()). """
2467 super(DownloadError, self).__init__(msg)
2468 self.exc_info = exc_info
d77c3dfd
FV
2469
2470
498f5606 2471class EntryNotInPlaylist(YoutubeDLError):
2472 """Entry not in playlist exception.
2473
2474 This exception will be thrown by YoutubeDL when a requested entry
2475 is not found in the playlist info_dict
2476 """
2477 pass
2478
2479
bf5b9d85 2480class SameFileError(YoutubeDLError):
59ae15a5 2481 """Same File exception.
d77c3dfd 2482
59ae15a5
PH
2483 This exception will be thrown by FileDownloader objects if they detect
2484 multiple files would have to be downloaded to the same file on disk.
2485 """
2486 pass
d77c3dfd
FV
2487
2488
bf5b9d85 2489class PostProcessingError(YoutubeDLError):
59ae15a5 2490 """Post Processing exception.
d77c3dfd 2491
59ae15a5
PH
2492 This exception may be raised by PostProcessor's .run() method to
2493 indicate an error in the postprocessing task.
2494 """
5f6a1245 2495
7851b379 2496 def __init__(self, msg):
bf5b9d85 2497 super(PostProcessingError, self).__init__(msg)
7851b379 2498 self.msg = msg
d77c3dfd 2499
5f6a1245 2500
8b0d7497 2501class ExistingVideoReached(YoutubeDLError):
2502 """ --max-downloads limit has been reached. """
2503 pass
2504
2505
2506class RejectedVideoReached(YoutubeDLError):
2507 """ --break-on-reject triggered. """
2508 pass
2509
2510
51d9739f 2511class ThrottledDownload(YoutubeDLError):
2512 """ Download speed below --throttled-rate. """
2513 pass
2514
2515
bf5b9d85 2516class MaxDownloadsReached(YoutubeDLError):
59ae15a5
PH
2517 """ --max-downloads limit has been reached. """
2518 pass
d77c3dfd
FV
2519
2520
bf5b9d85 2521class UnavailableVideoError(YoutubeDLError):
59ae15a5 2522 """Unavailable Format exception.
d77c3dfd 2523
59ae15a5
PH
2524 This exception will be thrown when a video is requested
2525 in a format that is not available for that video.
2526 """
2527 pass
d77c3dfd
FV
2528
2529
bf5b9d85 2530class ContentTooShortError(YoutubeDLError):
59ae15a5 2531 """Content Too Short exception.
d77c3dfd 2532
59ae15a5
PH
2533 This exception may be raised by FileDownloader objects when a file they
2534 download is too small for what the server announced first, indicating
2535 the connection was probably interrupted.
2536 """
d77c3dfd 2537
59ae15a5 2538 def __init__(self, downloaded, expected):
bf5b9d85
PM
2539 super(ContentTooShortError, self).__init__(
2540 'Downloaded {0} bytes, expected {1} bytes'.format(downloaded, expected)
2541 )
2c7ed247 2542 # Both in bytes
59ae15a5
PH
2543 self.downloaded = downloaded
2544 self.expected = expected
d77c3dfd 2545
5f6a1245 2546
bf5b9d85 2547class XAttrMetadataError(YoutubeDLError):
efa97bdc
YCH
2548 def __init__(self, code=None, msg='Unknown error'):
2549 super(XAttrMetadataError, self).__init__(msg)
2550 self.code = code
bd264412 2551 self.msg = msg
efa97bdc
YCH
2552
2553 # Parsing code and msg
3089bc74 2554 if (self.code in (errno.ENOSPC, errno.EDQUOT)
a0566bbf 2555 or 'No space left' in self.msg or 'Disk quota exceeded' in self.msg):
efa97bdc
YCH
2556 self.reason = 'NO_SPACE'
2557 elif self.code == errno.E2BIG or 'Argument list too long' in self.msg:
2558 self.reason = 'VALUE_TOO_LONG'
2559 else:
2560 self.reason = 'NOT_SUPPORTED'
2561
2562
bf5b9d85 2563class XAttrUnavailableError(YoutubeDLError):
efa97bdc
YCH
2564 pass
2565
2566
c5a59d93 2567def _create_http_connection(ydl_handler, http_class, is_https, *args, **kwargs):
e5e78797
S
2568 # Working around python 2 bug (see http://bugs.python.org/issue17849) by limiting
2569 # expected HTTP responses to meet HTTP/1.0 or later (see also
067aa17e 2570 # https://github.com/ytdl-org/youtube-dl/issues/6727)
e5e78797 2571 if sys.version_info < (3, 0):
65220c3b
S
2572 kwargs['strict'] = True
2573 hc = http_class(*args, **compat_kwargs(kwargs))
be4a824d 2574 source_address = ydl_handler._params.get('source_address')
8959018a 2575
be4a824d 2576 if source_address is not None:
8959018a
AU
2577 # This is a workaround for _create_connection() from socket, which tries all
2578 # address data from getaddrinfo(), including IPv6. This filters the result from
2579 # getaddrinfo() based on the source_address value.
2580 # This is based on the cpython socket.create_connection() function.
2581 # https://github.com/python/cpython/blob/master/Lib/socket.py#L691
2582 def _create_connection(address, timeout=socket._GLOBAL_DEFAULT_TIMEOUT, source_address=None):
2583 host, port = address
2584 err = None
2585 addrs = socket.getaddrinfo(host, port, 0, socket.SOCK_STREAM)
9e21e6d9
S
2586 af = socket.AF_INET if '.' in source_address[0] else socket.AF_INET6
2587 ip_addrs = [addr for addr in addrs if addr[0] == af]
2588 if addrs and not ip_addrs:
2589 ip_version = 'v4' if af == socket.AF_INET else 'v6'
2590 raise socket.error(
2591 "No remote IP%s addresses available for connect, can't use '%s' as source address"
2592 % (ip_version, source_address[0]))
8959018a
AU
2593 for res in ip_addrs:
2594 af, socktype, proto, canonname, sa = res
2595 sock = None
2596 try:
2597 sock = socket.socket(af, socktype, proto)
2598 if timeout is not socket._GLOBAL_DEFAULT_TIMEOUT:
2599 sock.settimeout(timeout)
2600 sock.bind(source_address)
2601 sock.connect(sa)
2602 err = None # Explicitly break reference cycle
2603 return sock
2604 except socket.error as _:
2605 err = _
2606 if sock is not None:
2607 sock.close()
2608 if err is not None:
2609 raise err
2610 else:
9e21e6d9
S
2611 raise socket.error('getaddrinfo returns an empty list')
2612 if hasattr(hc, '_create_connection'):
2613 hc._create_connection = _create_connection
be4a824d
PH
2614 sa = (source_address, 0)
2615 if hasattr(hc, 'source_address'): # Python 2.7+
2616 hc.source_address = sa
2617 else: # Python 2.6
2618 def _hc_connect(self, *args, **kwargs):
9e21e6d9 2619 sock = _create_connection(
be4a824d
PH
2620 (self.host, self.port), self.timeout, sa)
2621 if is_https:
d7932313
PH
2622 self.sock = ssl.wrap_socket(
2623 sock, self.key_file, self.cert_file,
2624 ssl_version=ssl.PROTOCOL_TLSv1)
be4a824d
PH
2625 else:
2626 self.sock = sock
2627 hc.connect = functools.partial(_hc_connect, hc)
2628
2629 return hc
2630
2631
87f0e62d 2632def handle_youtubedl_headers(headers):
992fc9d6
YCH
2633 filtered_headers = headers
2634
2635 if 'Youtubedl-no-compression' in filtered_headers:
2636 filtered_headers = dict((k, v) for k, v in filtered_headers.items() if k.lower() != 'accept-encoding')
87f0e62d 2637 del filtered_headers['Youtubedl-no-compression']
87f0e62d 2638
992fc9d6 2639 return filtered_headers
87f0e62d
YCH
2640
2641
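# Illustrative example of the header rewrite above (header values are made up):
#   >>> handle_youtubedl_headers({'User-Agent': 'UA', 'Accept-Encoding': 'gzip', 'Youtubedl-no-compression': 'True'})
#   {'User-Agent': 'UA'}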
acebc9cd 2642class YoutubeDLHandler(compat_urllib_request.HTTPHandler):
59ae15a5
PH
2643 """Handler for HTTP requests and responses.
2644
2645 This class, when installed with an OpenerDirector, automatically adds
2646 the standard headers to every HTTP request and handles gzipped and
2647 deflated responses from web servers. If compression is to be avoided in
2648 a particular request, the original request in the program code only has
0424ec30 2649 to include the HTTP header "Youtubedl-no-compression", which will be
59ae15a5
PH
2650 removed before making the real request.
2651
2652 Part of this code was copied from:
2653
2654 http://techknack.net/python-urllib2-handlers/
2655
2656 Andrew Rowls, the author of that code, agreed to release it to the
2657 public domain.
2658 """
2659
be4a824d
PH
2660 def __init__(self, params, *args, **kwargs):
2661 compat_urllib_request.HTTPHandler.__init__(self, *args, **kwargs)
2662 self._params = params
2663
2664 def http_open(self, req):
71aff188
YCH
2665 conn_class = compat_http_client.HTTPConnection
2666
2667 socks_proxy = req.headers.get('Ytdl-socks-proxy')
2668 if socks_proxy:
2669 conn_class = make_socks_conn_class(conn_class, socks_proxy)
2670 del req.headers['Ytdl-socks-proxy']
2671
be4a824d 2672 return self.do_open(functools.partial(
71aff188 2673 _create_http_connection, self, conn_class, False),
be4a824d
PH
2674 req)
2675
59ae15a5
PH
2676 @staticmethod
2677 def deflate(data):
fc2119f2 2678 if not data:
2679 return data
59ae15a5
PH
2680 try:
2681 return zlib.decompress(data, -zlib.MAX_WBITS)
2682 except zlib.error:
2683 return zlib.decompress(data)
2684
acebc9cd 2685 def http_request(self, req):
51f267d9
S
2686 # According to RFC 3986, URLs can not contain non-ASCII characters, however this is not
2687 # always respected by websites, some tend to give out URLs with non percent-encoded
2688 # non-ASCII characters (see telemb.py, ard.py [#3412])
2689 # urllib chokes on URLs with non-ASCII characters (see http://bugs.python.org/issue3991)
2690 # To work around aforementioned issue we will replace request's original URL with
2691 # percent-encoded one
2692 # Since redirects are also affected (e.g. http://www.southpark.de/alle-episoden/s18e09)
2693 # the code of this workaround has been moved here from YoutubeDL.urlopen()
2694 url = req.get_full_url()
2695 url_escaped = escape_url(url)
2696
2697 # Substitute URL if any change after escaping
2698 if url != url_escaped:
15d260eb 2699 req = update_Request(req, url=url_escaped)
51f267d9 2700
33ac271b 2701 for h, v in std_headers.items():
3d5f7a39
JK
2702 # Capitalize is needed because of Python bug 2275: http://bugs.python.org/issue2275
2703 # The dict keys are capitalized because of this bug by urllib
2704 if h.capitalize() not in req.headers:
33ac271b 2705 req.add_header(h, v)
87f0e62d
YCH
2706
2707 req.headers = handle_youtubedl_headers(req.headers)
989b4b2b
PH
2708
2709 if sys.version_info < (2, 7) and '#' in req.get_full_url():
2710 # Python 2.6 is brain-dead when it comes to fragments
2711 req._Request__original = req._Request__original.partition('#')[0]
2712 req._Request__r_type = req._Request__r_type.partition('#')[0]
2713
59ae15a5
PH
2714 return req
2715
acebc9cd 2716 def http_response(self, req, resp):
59ae15a5
PH
2717 old_resp = resp
2718 # gzip
2719 if resp.headers.get('Content-encoding', '') == 'gzip':
aa3e9507
PH
2720 content = resp.read()
2721 gz = gzip.GzipFile(fileobj=io.BytesIO(content), mode='rb')
2722 try:
2723 uncompressed = io.BytesIO(gz.read())
2724 except IOError as original_ioerror:
2725 # There may be junk at the end of the file
2726 # See http://stackoverflow.com/q/4928560/35070 for details
2727 for i in range(1, 1024):
2728 try:
2729 gz = gzip.GzipFile(fileobj=io.BytesIO(content[:-i]), mode='rb')
2730 uncompressed = io.BytesIO(gz.read())
2731 except IOError:
2732 continue
2733 break
2734 else:
2735 raise original_ioerror
b407d853 2736 resp = compat_urllib_request.addinfourl(uncompressed, old_resp.headers, old_resp.url, old_resp.code)
59ae15a5 2737 resp.msg = old_resp.msg
c047270c 2738 del resp.headers['Content-encoding']
59ae15a5
PH
2739 # deflate
2740 if resp.headers.get('Content-encoding', '') == 'deflate':
2741 gz = io.BytesIO(self.deflate(resp.read()))
b407d853 2742 resp = compat_urllib_request.addinfourl(gz, old_resp.headers, old_resp.url, old_resp.code)
59ae15a5 2743 resp.msg = old_resp.msg
c047270c 2744 del resp.headers['Content-encoding']
ad729172 2745 # Percent-encode redirect URL of Location HTTP header to satisfy RFC 3986 (see
067aa17e 2746 # https://github.com/ytdl-org/youtube-dl/issues/6457).
5a4d9ddb
S
2747 if 300 <= resp.code < 400:
2748 location = resp.headers.get('Location')
2749 if location:
2750 # As of RFC 2616 default charset is iso-8859-1 that is respected by python 3
2751 if sys.version_info >= (3, 0):
2752 location = location.encode('iso-8859-1').decode('utf-8')
0ea59007
YCH
2753 else:
2754 location = location.decode('utf-8')
5a4d9ddb
S
2755 location_escaped = escape_url(location)
2756 if location != location_escaped:
2757 del resp.headers['Location']
9a4aec8b
YCH
2758 if sys.version_info < (3, 0):
2759 location_escaped = location_escaped.encode('utf-8')
5a4d9ddb 2760 resp.headers['Location'] = location_escaped
59ae15a5 2761 return resp
0f8d03f8 2762
acebc9cd
PH
2763 https_request = http_request
2764 https_response = http_response
bf50b038 2765
5de90176 2766
71aff188
YCH
2767def make_socks_conn_class(base_class, socks_proxy):
2768 assert issubclass(base_class, (
2769 compat_http_client.HTTPConnection, compat_http_client.HTTPSConnection))
2770
2771 url_components = compat_urlparse.urlparse(socks_proxy)
2772 if url_components.scheme.lower() == 'socks5':
2773 socks_type = ProxyType.SOCKS5
2774 elif url_components.scheme.lower() in ('socks', 'socks4'):
2775 socks_type = ProxyType.SOCKS4
51fb4995
YCH
2776 elif url_components.scheme.lower() == 'socks4a':
2777 socks_type = ProxyType.SOCKS4A
71aff188 2778
cdd94c2e
YCH
2779 def unquote_if_non_empty(s):
2780 if not s:
2781 return s
2782 return compat_urllib_parse_unquote_plus(s)
2783
71aff188
YCH
2784 proxy_args = (
2785 socks_type,
2786 url_components.hostname, url_components.port or 1080,
2787 True, # Remote DNS
cdd94c2e
YCH
2788 unquote_if_non_empty(url_components.username),
2789 unquote_if_non_empty(url_components.password),
71aff188
YCH
2790 )
2791
2792 class SocksConnection(base_class):
2793 def connect(self):
2794 self.sock = sockssocket()
2795 self.sock.setproxy(*proxy_args)
2796 if type(self.timeout) in (int, float):
2797 self.sock.settimeout(self.timeout)
2798 self.sock.connect((self.host, self.port))
2799
2800 if isinstance(self, compat_http_client.HTTPSConnection):
2801 if hasattr(self, '_context'): # Python > 2.6
2802 self.sock = self._context.wrap_socket(
2803 self.sock, server_hostname=self.host)
2804 else:
2805 self.sock = ssl.wrap_socket(self.sock)
2806
2807 return SocksConnection
2808
2809
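# Illustrative usage (the proxy URL is a made-up local SOCKS5 endpoint):
#   conn_class = make_socks_conn_class(compat_http_client.HTTPConnection, 'socks5://127.0.0.1:1080')
#   conn = conn_class('example.com', 80)  # connect() tunnels through the proxy via sockssocket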
be4a824d
PH
2810class YoutubeDLHTTPSHandler(compat_urllib_request.HTTPSHandler):
2811 def __init__(self, params, https_conn_class=None, *args, **kwargs):
2812 compat_urllib_request.HTTPSHandler.__init__(self, *args, **kwargs)
2813 self._https_conn_class = https_conn_class or compat_http_client.HTTPSConnection
2814 self._params = params
2815
2816 def https_open(self, req):
4f264c02 2817 kwargs = {}
71aff188
YCH
2818 conn_class = self._https_conn_class
2819
4f264c02
JMF
2820 if hasattr(self, '_context'): # python > 2.6
2821 kwargs['context'] = self._context
2822 if hasattr(self, '_check_hostname'): # python 3.x
2823 kwargs['check_hostname'] = self._check_hostname
71aff188
YCH
2824
2825 socks_proxy = req.headers.get('Ytdl-socks-proxy')
2826 if socks_proxy:
2827 conn_class = make_socks_conn_class(conn_class, socks_proxy)
2828 del req.headers['Ytdl-socks-proxy']
2829
be4a824d 2830 return self.do_open(functools.partial(
71aff188 2831 _create_http_connection, self, conn_class, True),
4f264c02 2832 req, **kwargs)
be4a824d
PH
2833
2834
1bab3437 2835class YoutubeDLCookieJar(compat_cookiejar.MozillaCookieJar):
f1a8511f
S
2836 """
2837 See [1] for cookie file format.
2838
2839 1. https://curl.haxx.se/docs/http-cookies.html
2840 """
e7e62441 2841 _HTTPONLY_PREFIX = '#HttpOnly_'
c380cc28
S
2842 _ENTRY_LEN = 7
2843 _HEADER = '''# Netscape HTTP Cookie File
7a5c1cfe 2844# This file is generated by yt-dlp. Do not edit.
c380cc28
S
2845
2846'''
2847 _CookieFileEntry = collections.namedtuple(
2848 'CookieFileEntry',
2849 ('domain_name', 'include_subdomains', 'path', 'https_only', 'expires_at', 'name', 'value'))
e7e62441 2850
1bab3437 2851 def save(self, filename=None, ignore_discard=False, ignore_expires=False):
c380cc28
S
2852 """
2853 Save cookies to a file.
2854
2855 Most of the code is taken from CPython 3.8 and slightly adapted
2856 to support cookie files with UTF-8 in both python 2 and 3.
2857 """
2858 if filename is None:
2859 if self.filename is not None:
2860 filename = self.filename
2861 else:
2862 raise ValueError(compat_cookiejar.MISSING_FILENAME_TEXT)
2863
1bab3437
S
2864 # Store session cookies with `expires` set to 0 instead of an empty
2865 # string
2866 for cookie in self:
2867 if cookie.expires is None:
2868 cookie.expires = 0
c380cc28
S
2869
2870 with io.open(filename, 'w', encoding='utf-8') as f:
2871 f.write(self._HEADER)
2872 now = time.time()
2873 for cookie in self:
2874 if not ignore_discard and cookie.discard:
2875 continue
2876 if not ignore_expires and cookie.is_expired(now):
2877 continue
2878 if cookie.secure:
2879 secure = 'TRUE'
2880 else:
2881 secure = 'FALSE'
2882 if cookie.domain.startswith('.'):
2883 initial_dot = 'TRUE'
2884 else:
2885 initial_dot = 'FALSE'
2886 if cookie.expires is not None:
2887 expires = compat_str(cookie.expires)
2888 else:
2889 expires = ''
2890 if cookie.value is None:
2891 # cookies.txt regards 'Set-Cookie: foo' as a cookie
2892 # with no name, whereas http.cookiejar regards it as a
2893 # cookie with no value.
2894 name = ''
2895 value = cookie.name
2896 else:
2897 name = cookie.name
2898 value = cookie.value
2899 f.write(
2900 '\t'.join([cookie.domain, initial_dot, cookie.path,
2901 secure, expires, name, value]) + '\n')
1bab3437
S
2902
2903 def load(self, filename=None, ignore_discard=False, ignore_expires=False):
e7e62441 2904 """Load cookies from a file."""
2905 if filename is None:
2906 if self.filename is not None:
2907 filename = self.filename
2908 else:
2909 raise ValueError(compat_cookiejar.MISSING_FILENAME_TEXT)
2910
c380cc28
S
2911 def prepare_line(line):
2912 if line.startswith(self._HTTPONLY_PREFIX):
2913 line = line[len(self._HTTPONLY_PREFIX):]
2914 # comments and empty lines are fine
2915 if line.startswith('#') or not line.strip():
2916 return line
2917 cookie_list = line.split('\t')
2918 if len(cookie_list) != self._ENTRY_LEN:
2919 raise compat_cookiejar.LoadError('invalid length %d' % len(cookie_list))
2920 cookie = self._CookieFileEntry(*cookie_list)
2921 if cookie.expires_at and not cookie.expires_at.isdigit():
2922 raise compat_cookiejar.LoadError('invalid expires at %s' % cookie.expires_at)
2923 return line
2924
e7e62441 2925 cf = io.StringIO()
c380cc28 2926 with io.open(filename, encoding='utf-8') as f:
e7e62441 2927 for line in f:
c380cc28
S
2928 try:
2929 cf.write(prepare_line(line))
2930 except compat_cookiejar.LoadError as e:
2931 write_string(
2932 'WARNING: skipping cookie file entry due to %s: %r\n'
2933 % (e, line), sys.stderr)
2934 continue
e7e62441 2935 cf.seek(0)
2936 self._really_load(cf, filename, ignore_discard, ignore_expires)
1bab3437
S
2937 # Session cookies are denoted by either `expires` field set to
2938 # an empty string or 0. MozillaCookieJar only recognizes the former
2939 # (see [1]). So we need to force the latter to be recognized as session
2940 # cookies on our own.
2941 # Session cookies may be important for cookies-based authentication,
2942 # e.g. usually, when user does not check 'Remember me' check box while
2943 # logging in on a site, some important cookies are stored as session
2944 # cookies so that not recognizing them will result in failed login.
2945 # 1. https://bugs.python.org/issue17164
2946 for cookie in self:
2947 # Treat `expires=0` cookies as session cookies
2948 if cookie.expires == 0:
2949 cookie.expires = None
2950 cookie.discard = True
2951
2952
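# Illustrative cookies.txt entry as written by YoutubeDLCookieJar.save(); the seven
# tab-separated fields follow _CookieFileEntry and the values below are made up:
#   .example.com<TAB>TRUE<TAB>/<TAB>FALSE<TAB>1640995200<TAB>session_id<TAB>abc123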
a6420bf5
S
2953class YoutubeDLCookieProcessor(compat_urllib_request.HTTPCookieProcessor):
2954 def __init__(self, cookiejar=None):
2955 compat_urllib_request.HTTPCookieProcessor.__init__(self, cookiejar)
2956
2957 def http_response(self, request, response):
2958 # Python 2 will choke on the next HTTP request if there are non-ASCII
2959 # characters in the Set-Cookie HTTP header of the last response (see
067aa17e 2960 # https://github.com/ytdl-org/youtube-dl/issues/6769).
a6420bf5
S
2961 # In order to at least prevent crashing we will percent encode Set-Cookie
2962 # header before HTTPCookieProcessor starts processing it.
e28034c5
S
2963 # if sys.version_info < (3, 0) and response.headers:
2964 # for set_cookie_header in ('Set-Cookie', 'Set-Cookie2'):
2965 # set_cookie = response.headers.get(set_cookie_header)
2966 # if set_cookie:
2967 # set_cookie_escaped = compat_urllib_parse.quote(set_cookie, b"%/;:@&=+$,!~*'()?#[] ")
2968 # if set_cookie != set_cookie_escaped:
2969 # del response.headers[set_cookie_header]
2970 # response.headers[set_cookie_header] = set_cookie_escaped
a6420bf5
S
2971 return compat_urllib_request.HTTPCookieProcessor.http_response(self, request, response)
2972
f5fa042c 2973 https_request = compat_urllib_request.HTTPCookieProcessor.http_request
a6420bf5
S
2974 https_response = http_response
2975
2976
fca6dba8 2977class YoutubeDLRedirectHandler(compat_urllib_request.HTTPRedirectHandler):
201c1459 2978 """YoutubeDL redirect handler
2979
2980 The code is based on HTTPRedirectHandler implementation from CPython [1].
2981
2982 This redirect handler solves two issues:
2983 - ensures redirect URL is always unicode under python 2
2984 - introduces support for experimental HTTP response status code
2985 308 Permanent Redirect [2] used by some sites [3]
2986
2987 1. https://github.com/python/cpython/blob/master/Lib/urllib/request.py
2988 2. https://developer.mozilla.org/en-US/docs/Web/HTTP/Status/308
2989 3. https://github.com/ytdl-org/youtube-dl/issues/28768
2990 """
2991
2992 http_error_301 = http_error_303 = http_error_307 = http_error_308 = compat_urllib_request.HTTPRedirectHandler.http_error_302
2993
2994 def redirect_request(self, req, fp, code, msg, headers, newurl):
2995 """Return a Request or None in response to a redirect.
2996
2997 This is called by the http_error_30x methods when a
2998 redirection response is received. If a redirection should
2999 take place, return a new Request to allow http_error_30x to
3000 perform the redirect. Otherwise, raise HTTPError if no-one
3001 else should try to handle this url. Return None if you can't
3002 but another Handler might.
3003 """
3004 m = req.get_method()
3005 if (not (code in (301, 302, 303, 307, 308) and m in ("GET", "HEAD")
3006 or code in (301, 302, 303) and m == "POST")):
3007 raise compat_HTTPError(req.full_url, code, msg, headers, fp)
3008 # Strictly (according to RFC 2616), 301 or 302 in response to
3009 # a POST MUST NOT cause a redirection without confirmation
3010 # from the user (of urllib.request, in this case). In practice,
3011 # essentially all clients do redirect in this case, so we do
3012 # the same.
3013
3014 # On python 2 urlh.geturl() may sometimes return redirect URL
3015 # as byte string instead of unicode. This workaround allows
3016 # to force it always return unicode.
3017 if sys.version_info[0] < 3:
3018 newurl = compat_str(newurl)
3019
3020 # Be conciliant with URIs containing a space. This is mainly
3021 # redundant with the more complete encoding done in http_error_302(),
3022 # but it is kept for compatibility with other callers.
3023 newurl = newurl.replace(' ', '%20')
3024
3025 CONTENT_HEADERS = ("content-length", "content-type")
3026 # NB: don't use dict comprehension for python 2.6 compatibility
3027 newheaders = dict((k, v) for k, v in req.headers.items()
3028 if k.lower() not in CONTENT_HEADERS)
3029 return compat_urllib_request.Request(
3030 newurl, headers=newheaders, origin_req_host=req.origin_req_host,
3031 unverifiable=True)
fca6dba8
S
3032
3033
46f59e89
S
3034def extract_timezone(date_str):
3035 m = re.search(
3036 r'^.{8,}?(?P<tz>Z$| ?(?P<sign>\+|-)(?P<hours>[0-9]{2}):?(?P<minutes>[0-9]{2})$)',
3037 date_str)
3038 if not m:
3039 timezone = datetime.timedelta()
3040 else:
3041 date_str = date_str[:-len(m.group('tz'))]
3042 if not m.group('sign'):
3043 timezone = datetime.timedelta()
3044 else:
3045 sign = 1 if m.group('sign') == '+' else -1
3046 timezone = datetime.timedelta(
3047 hours=sign * int(m.group('hours')),
3048 minutes=sign * int(m.group('minutes')))
3049 return timezone, date_str
3050
3051
08b38d54 3052def parse_iso8601(date_str, delimiter='T', timezone=None):
912b38b4
PH
3053 """ Return a UNIX timestamp from the given date """
3054
3055 if date_str is None:
3056 return None
3057
52c3a6e4
S
3058 date_str = re.sub(r'\.[0-9]+', '', date_str)
3059
08b38d54 3060 if timezone is None:
46f59e89
S
3061 timezone, date_str = extract_timezone(date_str)
3062
52c3a6e4
S
3063 try:
3064 date_format = '%Y-%m-%d{0}%H:%M:%S'.format(delimiter)
3065 dt = datetime.datetime.strptime(date_str, date_format) - timezone
3066 return calendar.timegm(dt.timetuple())
3067 except ValueError:
3068 pass
912b38b4
PH
3069
3070
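# Illustrative examples for the two date helpers above:
#   >>> parse_iso8601('2021-01-01T00:00:00Z')
#   1609459200
# extract_timezone('2021-01-01T12:00:00+05:30') returns the +5:30 offset as a
# datetime.timedelta (19800 seconds) together with the stripped string
# '2021-01-01T12:00:00'.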
46f59e89
S
3071def date_formats(day_first=True):
3072 return DATE_FORMATS_DAY_FIRST if day_first else DATE_FORMATS_MONTH_FIRST
3073
3074
42bdd9d0 3075def unified_strdate(date_str, day_first=True):
bf50b038 3076 """Return a string with the date in the format YYYYMMDD"""
64e7ad60
PH
3077
3078 if date_str is None:
3079 return None
bf50b038 3080 upload_date = None
5f6a1245 3081 # Replace commas
026fcc04 3082 date_str = date_str.replace(',', ' ')
42bdd9d0 3083 # Remove AM/PM + timezone
9bb8e0a3 3084 date_str = re.sub(r'(?i)\s*(?:AM|PM)(?:\s+[A-Z]+)?', '', date_str)
46f59e89 3085 _, date_str = extract_timezone(date_str)
42bdd9d0 3086
46f59e89 3087 for expression in date_formats(day_first):
bf50b038
JMF
3088 try:
3089 upload_date = datetime.datetime.strptime(date_str, expression).strftime('%Y%m%d')
5de90176 3090 except ValueError:
bf50b038 3091 pass
42393ce2
PH
3092 if upload_date is None:
3093 timetuple = email.utils.parsedate_tz(date_str)
3094 if timetuple:
c6b9cf05
S
3095 try:
3096 upload_date = datetime.datetime(*timetuple[:6]).strftime('%Y%m%d')
3097 except ValueError:
3098 pass
6a750402
JMF
3099 if upload_date is not None:
3100 return compat_str(upload_date)
bf50b038 3101
5f6a1245 3102
46f59e89
S
3103def unified_timestamp(date_str, day_first=True):
3104 if date_str is None:
3105 return None
3106
2ae2ffda 3107 date_str = re.sub(r'[,|]', '', date_str)
46f59e89 3108
7dc2a74e 3109 pm_delta = 12 if re.search(r'(?i)PM', date_str) else 0
46f59e89
S
3110 timezone, date_str = extract_timezone(date_str)
3111
3112 # Remove AM/PM + timezone
3113 date_str = re.sub(r'(?i)\s*(?:AM|PM)(?:\s+[A-Z]+)?', '', date_str)
3114
deef3195
S
3115 # Remove unrecognized timezones from ISO 8601-like timestamps
3116 m = re.search(r'\d{1,2}:\d{1,2}(?:\.\d+)?(?P<tz>\s*[A-Z]+)$', date_str)
3117 if m:
3118 date_str = date_str[:-len(m.group('tz'))]
3119
f226880c
PH
3120 # Python only supports microseconds, so remove nanoseconds
3121 m = re.search(r'^([0-9]{4,}-[0-9]{1,2}-[0-9]{1,2}T[0-9]{1,2}:[0-9]{1,2}:[0-9]{1,2}\.[0-9]{6})[0-9]+$', date_str)
3122 if m:
3123 date_str = m.group(1)
3124
46f59e89
S
3125 for expression in date_formats(day_first):
3126 try:
7dc2a74e 3127 dt = datetime.datetime.strptime(date_str, expression) - timezone + datetime.timedelta(hours=pm_delta)
46f59e89
S
3128 return calendar.timegm(dt.timetuple())
3129 except ValueError:
3130 pass
3131 timetuple = email.utils.parsedate_tz(date_str)
3132 if timetuple:
7dc2a74e 3133 return calendar.timegm(timetuple) + pm_delta * 3600
46f59e89
S
3134
3135
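# Illustrative examples (assuming the DATE_FORMATS tuples defined earlier in this
# module include the usual ISO patterns):
#   >>> unified_strdate('2016-12-21')
#   '20161221'
#   >>> unified_timestamp('2016-12-21T12:00:00Z')
#   1482321600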
28e614de 3136def determine_ext(url, default_ext='unknown_video'):
85750f89 3137 if url is None or '.' not in url:
f4776371 3138 return default_ext
9cb9a5df 3139 guess = url.partition('?')[0].rpartition('.')[2]
73e79f2a
PH
3140 if re.match(r'^[A-Za-z0-9]+$', guess):
3141 return guess
a7aaa398
S
3142 # Try extract ext from URLs like http://example.com/foo/bar.mp4/?download
3143 elif guess.rstrip('/') in KNOWN_EXTENSIONS:
9cb9a5df 3144 return guess.rstrip('/')
73e79f2a 3145 else:
cbdbb766 3146 return default_ext
73e79f2a 3147
5f6a1245 3148
824fa511
S
3149def subtitles_filename(filename, sub_lang, sub_format, expected_real_ext=None):
3150 return replace_extension(filename, sub_lang + '.' + sub_format, expected_real_ext)
d4051a8e 3151
5f6a1245 3152
9e62f283 3153def datetime_from_str(date_str, precision='auto', format='%Y%m%d'):
37254abc
JMF
3154 """
3155 Return a datetime object from a string in the format YYYYMMDD or
9e62f283 3156 (now|today|date)[+-][0-9](microsecond|second|minute|hour|day|week|month|year)(s)?
3157
3158 format: string date format used to parse date_str
3159 precision: round the time portion of a datetime object.
3160 auto|microsecond|second|minute|hour|day.
3161 auto: round to the unit provided in date_str (if applicable).
3162 """
3163 auto_precision = False
3164 if precision == 'auto':
3165 auto_precision = True
3166 precision = 'microsecond'
3167 today = datetime_round(datetime.datetime.now(), precision)
f8795e10 3168 if date_str in ('now', 'today'):
37254abc 3169 return today
f8795e10
PH
3170 if date_str == 'yesterday':
3171 return today - datetime.timedelta(days=1)
9e62f283 3172 match = re.match(
3173 r'(?P<start>.+)(?P<sign>[+-])(?P<time>\d+)(?P<unit>microsecond|second|minute|hour|day|week|month|year)(s)?',
3174 date_str)
37254abc 3175 if match is not None:
9e62f283 3176 start_time = datetime_from_str(match.group('start'), precision, format)
3177 time = int(match.group('time')) * (-1 if match.group('sign') == '-' else 1)
37254abc 3178 unit = match.group('unit')
9e62f283 3179 if unit == 'month' or unit == 'year':
3180 new_date = datetime_add_months(start_time, time * 12 if unit == 'year' else time)
37254abc 3181 unit = 'day'
9e62f283 3182 else:
3183 if unit == 'week':
3184 unit = 'day'
3185 time *= 7
3186 delta = datetime.timedelta(**{unit + 's': time})
3187 new_date = start_time + delta
3188 if auto_precision:
3189 return datetime_round(new_date, unit)
3190 return new_date
3191
3192 return datetime_round(datetime.datetime.strptime(date_str, format), precision)
3193
3194
3195def date_from_str(date_str, format='%Y%m%d'):
3196 """
3197 Return a datetime object from a string in the format YYYYMMDD or
3198 (now|today|date)[+-][0-9](microsecond|second|minute|hour|day|week|month|year)(s)?
3199
3200 format: string date format used to parse date_str
3201 """
3202 return datetime_from_str(date_str, precision='microsecond', format=format).date()
3203
3204
3205def datetime_add_months(dt, months):
3206 """Increment/Decrement a datetime object by months."""
3207 month = dt.month + months - 1
3208 year = dt.year + month // 12
3209 month = month % 12 + 1
3210 day = min(dt.day, calendar.monthrange(year, month)[1])
3211 return dt.replace(year, month, day)
3212
3213
3214def datetime_round(dt, precision='day'):
3215 """
3216 Round a datetime object's time to a specific precision
3217 """
3218 if precision == 'microsecond':
3219 return dt
3220
3221 unit_seconds = {
3222 'day': 86400,
3223 'hour': 3600,
3224 'minute': 60,
3225 'second': 1,
3226 }
3227 roundto = lambda x, n: ((x + n / 2) // n) * n
3228 timestamp = calendar.timegm(dt.timetuple())
3229 return datetime.datetime.utcfromtimestamp(roundto(timestamp, unit_seconds[precision]))
5f6a1245
JW
3230
3231
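# Illustrative examples for the date helpers above:
#   >>> datetime_add_months(datetime.datetime(2020, 1, 31), 1)
#   datetime.datetime(2020, 2, 29, 0, 0)   # day clamped to the end of February
# date_from_str('now-1week') gives the calendar date exactly one week before today.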
e63fc1be 3232def hyphenate_date(date_str):
3233 """
3234 Convert a date in 'YYYYMMDD' format to 'YYYY-MM-DD' format"""
3235 match = re.match(r'^(\d\d\d\d)(\d\d)(\d\d)$', date_str)
3236 if match is not None:
3237 return '-'.join(match.groups())
3238 else:
3239 return date_str
3240
5f6a1245 3241
bd558525
JMF
3242class DateRange(object):
3243 """Represents a time interval between two dates"""
5f6a1245 3244
bd558525
JMF
3245 def __init__(self, start=None, end=None):
3246 """start and end must be strings in the format accepted by date"""
3247 if start is not None:
3248 self.start = date_from_str(start)
3249 else:
3250 self.start = datetime.datetime.min.date()
3251 if end is not None:
3252 self.end = date_from_str(end)
3253 else:
3254 self.end = datetime.datetime.max.date()
37254abc 3255 if self.start > self.end:
bd558525 3256 raise ValueError('Date range: "%s" , the start date must be before the end date' % self)
5f6a1245 3257
bd558525
JMF
3258 @classmethod
3259 def day(cls, day):
3260 """Returns a range that only contains the given day"""
5f6a1245
JW
3261 return cls(day, day)
3262
bd558525
JMF
3263 def __contains__(self, date):
3264 """Check if the date is in the range"""
37254abc
JMF
3265 if not isinstance(date, datetime.date):
3266 date = date_from_str(date)
3267 return self.start <= date <= self.end
5f6a1245 3268
bd558525 3269 def __str__(self):
5f6a1245 3270 return '%s - %s' % (self.start.isoformat(), self.end.isoformat())
c496ca96
PH
3271
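# Illustrative example of DateRange membership:
#   >>> '20210615' in DateRange('20210101', '20211231')
#   True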
3272
3273def platform_name():
3274 """ Returns the platform name as a compat_str """
3275 res = platform.platform()
3276 if isinstance(res, bytes):
3277 res = res.decode(preferredencoding())
3278
3279 assert isinstance(res, compat_str)
3280 return res
c257baff
PH
3281
3282
b58ddb32
PH
3283def _windows_write_string(s, out):
3284 """ Returns True if the string was written using special methods,
3285 False if it has yet to be written out."""
3286 # Adapted from http://stackoverflow.com/a/3259271/35070
3287
3288 import ctypes
3289 import ctypes.wintypes
3290
3291 WIN_OUTPUT_IDS = {
3292 1: -11,
3293 2: -12,
3294 }
3295
a383a98a
PH
3296 try:
3297 fileno = out.fileno()
3298 except AttributeError:
3299 # If the output stream doesn't have a fileno, it's virtual
3300 return False
aa42e873
PH
3301 except io.UnsupportedOperation:
3302 # Some strange Windows pseudo files?
3303 return False
b58ddb32
PH
3304 if fileno not in WIN_OUTPUT_IDS:
3305 return False
3306
d7cd9a9e 3307 GetStdHandle = compat_ctypes_WINFUNCTYPE(
b58ddb32 3308 ctypes.wintypes.HANDLE, ctypes.wintypes.DWORD)(
d7cd9a9e 3309 ('GetStdHandle', ctypes.windll.kernel32))
b58ddb32
PH
3310 h = GetStdHandle(WIN_OUTPUT_IDS[fileno])
3311
d7cd9a9e 3312 WriteConsoleW = compat_ctypes_WINFUNCTYPE(
b58ddb32
PH
3313 ctypes.wintypes.BOOL, ctypes.wintypes.HANDLE, ctypes.wintypes.LPWSTR,
3314 ctypes.wintypes.DWORD, ctypes.POINTER(ctypes.wintypes.DWORD),
d7cd9a9e 3315 ctypes.wintypes.LPVOID)(('WriteConsoleW', ctypes.windll.kernel32))
b58ddb32
PH
3316 written = ctypes.wintypes.DWORD(0)
3317
d7cd9a9e 3318 GetFileType = compat_ctypes_WINFUNCTYPE(ctypes.wintypes.DWORD, ctypes.wintypes.DWORD)(('GetFileType', ctypes.windll.kernel32))
b58ddb32
PH
3319 FILE_TYPE_CHAR = 0x0002
3320 FILE_TYPE_REMOTE = 0x8000
d7cd9a9e 3321 GetConsoleMode = compat_ctypes_WINFUNCTYPE(
b58ddb32
PH
3322 ctypes.wintypes.BOOL, ctypes.wintypes.HANDLE,
3323 ctypes.POINTER(ctypes.wintypes.DWORD))(
d7cd9a9e 3324 ('GetConsoleMode', ctypes.windll.kernel32))
b58ddb32
PH
3325 INVALID_HANDLE_VALUE = ctypes.wintypes.DWORD(-1).value
3326
3327 def not_a_console(handle):
3328 if handle == INVALID_HANDLE_VALUE or handle is None:
3329 return True
3089bc74
S
3330 return ((GetFileType(handle) & ~FILE_TYPE_REMOTE) != FILE_TYPE_CHAR
3331 or GetConsoleMode(handle, ctypes.byref(ctypes.wintypes.DWORD())) == 0)
b58ddb32
PH
3332
3333 if not_a_console(h):
3334 return False
3335
d1b9c912
PH
3336 def next_nonbmp_pos(s):
3337 try:
3338 return next(i for i, c in enumerate(s) if ord(c) > 0xffff)
3339 except StopIteration:
3340 return len(s)
3341
3342 while s:
3343 count = min(next_nonbmp_pos(s), 1024)
3344
b58ddb32 3345 ret = WriteConsoleW(
d1b9c912 3346 h, s, count if count else 2, ctypes.byref(written), None)
b58ddb32
PH
3347 if ret == 0:
3348 raise OSError('Failed to write string')
d1b9c912
PH
3349 if not count: # We just wrote a non-BMP character
3350 assert written.value == 2
3351 s = s[1:]
3352 else:
3353 assert written.value > 0
3354 s = s[written.value:]
b58ddb32
PH
3355 return True
3356
3357
734f90bb 3358def write_string(s, out=None, encoding=None):
7459e3a2
PH
3359 if out is None:
3360 out = sys.stderr
8bf48f23 3361 assert type(s) == compat_str
7459e3a2 3362
b58ddb32
PH
3363 if sys.platform == 'win32' and encoding is None and hasattr(out, 'fileno'):
3364 if _windows_write_string(s, out):
3365 return
3366
3089bc74
S
3367 if ('b' in getattr(out, 'mode', '')
3368 or sys.version_info[0] < 3): # Python 2 lies about mode of sys.stderr
104aa738
PH
3369 byt = s.encode(encoding or preferredencoding(), 'ignore')
3370 out.write(byt)
3371 elif hasattr(out, 'buffer'):
3372 enc = encoding or getattr(out, 'encoding', None) or preferredencoding()
3373 byt = s.encode(enc, 'ignore')
3374 out.buffer.write(byt)
3375 else:
8bf48f23 3376 out.write(s)
7459e3a2
PH
3377 out.flush()
3378
3379
48ea9cea
PH
3380def bytes_to_intlist(bs):
3381 if not bs:
3382 return []
3383 if isinstance(bs[0], int): # Python 3
3384 return list(bs)
3385 else:
3386 return [ord(c) for c in bs]
3387
c257baff 3388
cba892fa 3389def intlist_to_bytes(xs):
3390 if not xs:
3391 return b''
edaa23f8 3392 return compat_struct_pack('%dB' % len(xs), *xs)
c38b1e77
PH
3393
3394
c1c9a79c
PH
3395# Cross-platform file locking
3396if sys.platform == 'win32':
3397 import ctypes.wintypes
3398 import msvcrt
3399
3400 class OVERLAPPED(ctypes.Structure):
3401 _fields_ = [
3402 ('Internal', ctypes.wintypes.LPVOID),
3403 ('InternalHigh', ctypes.wintypes.LPVOID),
3404 ('Offset', ctypes.wintypes.DWORD),
3405 ('OffsetHigh', ctypes.wintypes.DWORD),
3406 ('hEvent', ctypes.wintypes.HANDLE),
3407 ]
3408
3409 kernel32 = ctypes.windll.kernel32
3410 LockFileEx = kernel32.LockFileEx
3411 LockFileEx.argtypes = [
3412 ctypes.wintypes.HANDLE, # hFile
3413 ctypes.wintypes.DWORD, # dwFlags
3414 ctypes.wintypes.DWORD, # dwReserved
3415 ctypes.wintypes.DWORD, # nNumberOfBytesToLockLow
3416 ctypes.wintypes.DWORD, # nNumberOfBytesToLockHigh
3417 ctypes.POINTER(OVERLAPPED) # Overlapped
3418 ]
3419 LockFileEx.restype = ctypes.wintypes.BOOL
3420 UnlockFileEx = kernel32.UnlockFileEx
3421 UnlockFileEx.argtypes = [
3422 ctypes.wintypes.HANDLE, # hFile
3423 ctypes.wintypes.DWORD, # dwReserved
3424 ctypes.wintypes.DWORD, # nNumberOfBytesToLockLow
3425 ctypes.wintypes.DWORD, # nNumberOfBytesToLockHigh
3426 ctypes.POINTER(OVERLAPPED) # Overlapped
3427 ]
3428 UnlockFileEx.restype = ctypes.wintypes.BOOL
3429 whole_low = 0xffffffff
3430 whole_high = 0x7fffffff
3431
3432 def _lock_file(f, exclusive):
3433 overlapped = OVERLAPPED()
3434 overlapped.Offset = 0
3435 overlapped.OffsetHigh = 0
3436 overlapped.hEvent = 0
3437 f._lock_file_overlapped_p = ctypes.pointer(overlapped)
3438 handle = msvcrt.get_osfhandle(f.fileno())
3439 if not LockFileEx(handle, 0x2 if exclusive else 0x0, 0,
3440 whole_low, whole_high, f._lock_file_overlapped_p):
3441 raise OSError('Locking file failed: %r' % ctypes.FormatError())
3442
3443 def _unlock_file(f):
3444 assert f._lock_file_overlapped_p
3445 handle = msvcrt.get_osfhandle(f.fileno())
3446 if not UnlockFileEx(handle, 0,
3447 whole_low, whole_high, f._lock_file_overlapped_p):
3448 raise OSError('Unlocking file failed: %r' % ctypes.FormatError())
3449
3450else:
399a76e6
YCH
3451 # Some platforms, such as Jython, are missing fcntl
3452 try:
3453 import fcntl
c1c9a79c 3454
399a76e6
YCH
3455 def _lock_file(f, exclusive):
3456 fcntl.flock(f, fcntl.LOCK_EX if exclusive else fcntl.LOCK_SH)
c1c9a79c 3457
399a76e6
YCH
3458 def _unlock_file(f):
3459 fcntl.flock(f, fcntl.LOCK_UN)
3460 except ImportError:
3461 UNSUPPORTED_MSG = 'file locking is not supported on this platform'
3462
3463 def _lock_file(f, exclusive):
3464 raise IOError(UNSUPPORTED_MSG)
3465
3466 def _unlock_file(f):
3467 raise IOError(UNSUPPORTED_MSG)
c1c9a79c
PH
3468
3469
3470class locked_file(object):
3471 def __init__(self, filename, mode, encoding=None):
3472 assert mode in ['r', 'a', 'w']
3473 self.f = io.open(filename, mode, encoding=encoding)
3474 self.mode = mode
3475
3476 def __enter__(self):
3477 exclusive = self.mode != 'r'
3478 try:
3479 _lock_file(self.f, exclusive)
3480 except IOError:
3481 self.f.close()
3482 raise
3483 return self
3484
3485 def __exit__(self, etype, value, traceback):
3486 try:
3487 _unlock_file(self.f)
3488 finally:
3489 self.f.close()
3490
3491 def __iter__(self):
3492 return iter(self.f)
3493
3494 def write(self, *args):
3495 return self.f.write(*args)
3496
3497 def read(self, *args):
3498 return self.f.read(*args)
4eb7f1d1
JMF
3499
3500
4644ac55
S
3501def get_filesystem_encoding():
3502 encoding = sys.getfilesystemencoding()
3503 return encoding if encoding is not None else 'utf-8'
3504
3505
4eb7f1d1 3506def shell_quote(args):
a6a173c2 3507 quoted_args = []
4644ac55 3508 encoding = get_filesystem_encoding()
a6a173c2
JMF
3509 for a in args:
3510 if isinstance(a, bytes):
3511 # We may get a filename encoded with 'encodeFilename'
3512 a = a.decode(encoding)
aefce8e6 3513 quoted_args.append(compat_shlex_quote(a))
28e614de 3514 return ' '.join(quoted_args)
9d4660ca
PH
3515
3516
3517def smuggle_url(url, data):
3518 """ Pass additional data in a URL for internal use. """
3519
81953d1a
RA
3520 url, idata = unsmuggle_url(url, {})
3521 data.update(idata)
15707c7e 3522 sdata = compat_urllib_parse_urlencode(
28e614de
PH
3523 {'__youtubedl_smuggle': json.dumps(data)})
3524 return url + '#' + sdata
9d4660ca
PH
3525
3526
79f82953 3527def unsmuggle_url(smug_url, default=None):
83e865a3 3528 if '#__youtubedl_smuggle' not in smug_url:
79f82953 3529 return smug_url, default
28e614de
PH
3530 url, _, sdata = smug_url.rpartition('#')
3531 jsond = compat_parse_qs(sdata)['__youtubedl_smuggle'][0]
9d4660ca
PH
3532 data = json.loads(jsond)
3533 return url, data
02dbf93f
PH
3534
3535
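# Illustrative round-trip (the URL and payload are made up):
#   >>> url = smuggle_url('http://example.com/video', {'lang': 'en'})
#   >>> unsmuggle_url(url)
#   ('http://example.com/video', {'lang': 'en'})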
02dbf93f
PH
3536def format_bytes(bytes):
3537 if bytes is None:
28e614de 3538 return 'N/A'
02dbf93f
PH
3539 if type(bytes) is str:
3540 bytes = float(bytes)
3541 if bytes == 0.0:
3542 exponent = 0
3543 else:
3544 exponent = int(math.log(bytes, 1024.0))
28e614de 3545 suffix = ['B', 'KiB', 'MiB', 'GiB', 'TiB', 'PiB', 'EiB', 'ZiB', 'YiB'][exponent]
02dbf93f 3546 converted = float(bytes) / float(1024 ** exponent)
28e614de 3547 return '%.2f%s' % (converted, suffix)
f53c966a 3548
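# Illustrative example:
#   >>> format_bytes(1536)
#   '1.50KiB'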
1c088fa8 3549
fb47597b
S
3550def lookup_unit_table(unit_table, s):
3551 units_re = '|'.join(re.escape(u) for u in unit_table)
3552 m = re.match(
782b1b5b 3553 r'(?P<num>[0-9]+(?:[,.][0-9]*)?)\s*(?P<unit>%s)\b' % units_re, s)
fb47597b
S
3554 if not m:
3555 return None
3556 num_str = m.group('num').replace(',', '.')
3557 mult = unit_table[m.group('unit')]
3558 return int(float(num_str) * mult)
3559
3560
be64b5b0
PH
3561def parse_filesize(s):
3562 if s is None:
3563 return None
3564
dfb1b146 3565 # The lower-case forms are of course incorrect and unofficial,
be64b5b0
PH
3566 # but we support those too
3567 _UNIT_TABLE = {
3568 'B': 1,
3569 'b': 1,
70852b47 3570 'bytes': 1,
be64b5b0
PH
3571 'KiB': 1024,
3572 'KB': 1000,
3573 'kB': 1024,
3574 'Kb': 1000,
13585d76 3575 'kb': 1000,
70852b47
YCH
3576 'kilobytes': 1000,
3577 'kibibytes': 1024,
be64b5b0
PH
3578 'MiB': 1024 ** 2,
3579 'MB': 1000 ** 2,
3580 'mB': 1024 ** 2,
3581 'Mb': 1000 ** 2,
13585d76 3582 'mb': 1000 ** 2,
70852b47
YCH
3583 'megabytes': 1000 ** 2,
3584 'mebibytes': 1024 ** 2,
be64b5b0
PH
3585 'GiB': 1024 ** 3,
3586 'GB': 1000 ** 3,
3587 'gB': 1024 ** 3,
3588 'Gb': 1000 ** 3,
13585d76 3589 'gb': 1000 ** 3,
70852b47
YCH
3590 'gigabytes': 1000 ** 3,
3591 'gibibytes': 1024 ** 3,
be64b5b0
PH
3592 'TiB': 1024 ** 4,
3593 'TB': 1000 ** 4,
3594 'tB': 1024 ** 4,
3595 'Tb': 1000 ** 4,
13585d76 3596 'tb': 1000 ** 4,
70852b47
YCH
3597 'terabytes': 1000 ** 4,
3598 'tebibytes': 1024 ** 4,
be64b5b0
PH
3599 'PiB': 1024 ** 5,
3600 'PB': 1000 ** 5,
3601 'pB': 1024 ** 5,
3602 'Pb': 1000 ** 5,
13585d76 3603 'pb': 1000 ** 5,
70852b47
YCH
3604 'petabytes': 1000 ** 5,
3605 'pebibytes': 1024 ** 5,
be64b5b0
PH
3606 'EiB': 1024 ** 6,
3607 'EB': 1000 ** 6,
3608 'eB': 1024 ** 6,
3609 'Eb': 1000 ** 6,
13585d76 3610 'eb': 1000 ** 6,
70852b47
YCH
3611 'exabytes': 1000 ** 6,
3612 'exbibytes': 1024 ** 6,
be64b5b0
PH
3613 'ZiB': 1024 ** 7,
3614 'ZB': 1000 ** 7,
3615 'zB': 1024 ** 7,
3616 'Zb': 1000 ** 7,
13585d76 3617 'zb': 1000 ** 7,
70852b47
YCH
3618 'zettabytes': 1000 ** 7,
3619 'zebibytes': 1024 ** 7,
be64b5b0
PH
3620 'YiB': 1024 ** 8,
3621 'YB': 1000 ** 8,
3622 'yB': 1024 ** 8,
3623 'Yb': 1000 ** 8,
13585d76 3624 'yb': 1000 ** 8,
70852b47
YCH
3625 'yottabytes': 1000 ** 8,
3626 'yobibytes': 1024 ** 8,
be64b5b0
PH
3627 }
3628
fb47597b
S
3629 return lookup_unit_table(_UNIT_TABLE, s)
3630
3631
3632def parse_count(s):
3633 if s is None:
be64b5b0
PH
3634 return None
3635
fb47597b
S
3636 s = s.strip()
3637
3638 if re.match(r'^[\d,.]+$', s):
3639 return str_to_int(s)
3640
3641 _UNIT_TABLE = {
3642 'k': 1000,
3643 'K': 1000,
3644 'm': 1000 ** 2,
3645 'M': 1000 ** 2,
3646 'kk': 1000 ** 2,
3647 'KK': 1000 ** 2,
3648 }
be64b5b0 3649
fb47597b 3650 return lookup_unit_table(_UNIT_TABLE, s)
be64b5b0 3651
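# Illustrative examples of the two unit-table parsers above:
#   >>> parse_filesize('1.5 GiB')
#   1610612736
#   >>> parse_count('1.2M')
#   1200000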
2f7ae819 3652
b871d7e9
S
3653def parse_resolution(s):
3654 if s is None:
3655 return {}
3656
3657 mobj = re.search(r'\b(?P<w>\d+)\s*[xX×]\s*(?P<h>\d+)\b', s)
3658 if mobj:
3659 return {
3660 'width': int(mobj.group('w')),
3661 'height': int(mobj.group('h')),
3662 }
3663
3664 mobj = re.search(r'\b(\d+)[pPiI]\b', s)
3665 if mobj:
3666 return {'height': int(mobj.group(1))}
3667
3668 mobj = re.search(r'\b([48])[kK]\b', s)
3669 if mobj:
3670 return {'height': int(mobj.group(1)) * 540}
3671
3672 return {}
3673
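# Illustrative examples of parse_resolution():
#   >>> parse_resolution('1920x1080')
#   {'width': 1920, 'height': 1080}
#   >>> parse_resolution('720p')
#   {'height': 720}
#   >>> parse_resolution('4K')
#   {'height': 2160}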
3674
0dc41787
S
3675def parse_bitrate(s):
3676 if not isinstance(s, compat_str):
3677 return
3678 mobj = re.search(r'\b(\d+)\s*kbps', s)
3679 if mobj:
3680 return int(mobj.group(1))
3681
3682
a942d6cb 3683def month_by_name(name, lang='en'):
caefb1de
PH
3684 """ Return the number of a month by (locale-independently) English name """
3685
f6717dec 3686 month_names = MONTH_NAMES.get(lang, MONTH_NAMES['en'])
a942d6cb 3687
caefb1de 3688 try:
f6717dec 3689 return month_names.index(name) + 1
7105440c
YCH
3690 except ValueError:
3691 return None
3692
3693
3694def month_by_abbreviation(abbrev):
3695 """ Return the number of a month by (locale-independently) English
3696 abbreviations """
3697
3698 try:
3699 return [s[:3] for s in ENGLISH_MONTH_NAMES].index(abbrev) + 1
caefb1de
PH
3700 except ValueError:
3701 return None
18258362
JMF
3702
3703
5aafe895 3704def fix_xml_ampersands(xml_str):
18258362 3705 """Replace all the '&' by '&amp;' in XML"""
5aafe895
PH
3706 return re.sub(
3707 r'&(?!amp;|lt;|gt;|apos;|quot;|#x[0-9a-fA-F]{,4};|#[0-9]{,4};)',
28e614de 3708 '&amp;',
5aafe895 3709 xml_str)
e3946f98
PH
3710
3711
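# Illustrative example (only the bare '&' gets escaped, known entities are kept):
#   >>> fix_xml_ampersands('<a x="1 & 2 &amp; 3"/>')
#   '<a x="1 &amp; 2 &amp; 3"/>'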
3712def setproctitle(title):
8bf48f23 3713 assert isinstance(title, compat_str)
c1c05c67
YCH
3714
3715 # ctypes in Jython is not complete
3716 # http://bugs.jython.org/issue2148
3717 if sys.platform.startswith('java'):
3718 return
3719
e3946f98 3720 try:
611c1dd9 3721 libc = ctypes.cdll.LoadLibrary('libc.so.6')
e3946f98
PH
3722 except OSError:
3723 return
2f49bcd6
RC
3724 except TypeError:
3725 # LoadLibrary in Windows Python 2.7.13 only expects
3726 # a bytestring, but since unicode_literals turns
3727 # every string into a unicode string, it fails.
3728 return
6eefe533
PH
3729 title_bytes = title.encode('utf-8')
3730 buf = ctypes.create_string_buffer(len(title_bytes))
3731 buf.value = title_bytes
e3946f98 3732 try:
6eefe533 3733 libc.prctl(15, buf, 0, 0, 0)
e3946f98
PH
3734 except AttributeError:
3735 return # Strange libc, just skip this
d7dda168
PH
3736
3737
3738def remove_start(s, start):
46bc9b7d 3739 return s[len(start):] if s is not None and s.startswith(start) else s
29eb5174
PH
3740
3741
2b9faf55 3742def remove_end(s, end):
46bc9b7d 3743 return s[:-len(end)] if s is not None and s.endswith(end) else s
2b9faf55
PH
3744
3745
31b2051e
S
3746def remove_quotes(s):
3747 if s is None or len(s) < 2:
3748 return s
3749 for quote in ('"', "'", ):
3750 if s[0] == quote and s[-1] == quote:
3751 return s[1:-1]
3752 return s
3753
3754
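# Illustrative examples of the string-trimming helpers above:
#   >>> remove_start('www.example.com', 'www.')
#   'example.com'
#   >>> remove_quotes('"clip title"')
#   'clip title'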
b6e0c7d2
U
3755def get_domain(url):
3756 domain = re.match(r'(?:https?:\/\/)?(?:www\.)?(?P<domain>[^\n\/]+\.[^\n\/]+)(?:\/(.*))?', url)
3757 return domain.group('domain') if domain else None
3758
3759
29eb5174 3760def url_basename(url):
9b8aaeed 3761 path = compat_urlparse.urlparse(url).path
28e614de 3762 return path.strip('/').split('/')[-1]
aa94a6d3
PH
3763
3764
02dc0a36
S
3765def base_url(url):
3766 return re.match(r'https?://[^?#&]+/', url).group()
3767
3768
e34c3361 3769def urljoin(base, path):
4b5de77b
S
3770 if isinstance(path, bytes):
3771 path = path.decode('utf-8')
e34c3361
S
3772 if not isinstance(path, compat_str) or not path:
3773 return None
fad4ceb5 3774 if re.match(r'^(?:[a-zA-Z][a-zA-Z0-9+-.]*:)?//', path):
e34c3361 3775 return path
4b5de77b
S
3776 if isinstance(base, bytes):
3777 base = base.decode('utf-8')
3778 if not isinstance(base, compat_str) or not re.match(
3779 r'^(?:https?:)?//', base):
e34c3361
S
3780 return None
3781 return compat_urlparse.urljoin(base, path)
3782
3783
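# Illustrative examples of the URL helpers above (URLs are only examples):
#   >>> get_domain('https://www.youtube.com/watch?v=abc')
#   'youtube.com'
#   >>> url_basename('https://example.com/foo/bar.mp4?x=1')
#   'bar.mp4'
#   >>> base_url('https://example.com/a/b/c.mp4')
#   'https://example.com/a/b/'
#   >>> urljoin('https://example.com/a/b', 'c.mp4')
#   'https://example.com/a/c.mp4'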
aa94a6d3
PH
3784class HEADRequest(compat_urllib_request.Request):
3785 def get_method(self):
611c1dd9 3786 return 'HEAD'
7217e148
PH
3787
3788
95cf60e8
S
3789class PUTRequest(compat_urllib_request.Request):
3790 def get_method(self):
3791 return 'PUT'
3792
3793
9732d77e 3794def int_or_none(v, scale=1, default=None, get_attr=None, invscale=1):
28746fbd
PH
3795 if get_attr:
3796 if v is not None:
3797 v = getattr(v, get_attr, None)
9572013d
PH
3798 if v == '':
3799 v = None
1812afb7
S
3800 if v is None:
3801 return default
3802 try:
3803 return int(v) * invscale // scale
5e1271c5 3804 except (ValueError, TypeError):
af98f8ff 3805 return default
9732d77e 3806
9572013d 3807
40a90862
JMF
3808def str_or_none(v, default=None):
3809 return default if v is None else compat_str(v)
3810
9732d77e
PH
3811
3812def str_to_int(int_str):
48d4681e 3813 """ A more relaxed version of int_or_none """
42db58ec 3814 if isinstance(int_str, compat_integer_types):
348c6bf1 3815 return int_str
42db58ec
S
3816 elif isinstance(int_str, compat_str):
3817 int_str = re.sub(r'[,\.\+]', '', int_str)
3818 return int_or_none(int_str)
608d11f5
PH
3819
3820
9732d77e 3821def float_or_none(v, scale=1, invscale=1, default=None):
caf80631
S
3822 if v is None:
3823 return default
3824 try:
3825 return float(v) * invscale / scale
5e1271c5 3826 except (ValueError, TypeError):
caf80631 3827 return default
43f775e4
PH
3828
3829
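# Illustrative examples of the lenient conversion helpers above:
#   >>> str_to_int('123,456')
#   123456
#   >>> int_or_none('42', invscale=1000)
#   42000
#   >>> float_or_none('abc', default=0.0)
#   0.0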
c7e327c4
S
3830def bool_or_none(v, default=None):
3831 return v if isinstance(v, bool) else default
3832
3833
53cd37ba
S
3834def strip_or_none(v, default=None):
3835 return v.strip() if isinstance(v, compat_str) else default
b72b4431
S
3836
3837
af03000a
S
3838def url_or_none(url):
3839 if not url or not isinstance(url, compat_str):
3840 return None
3841 url = url.strip()
29f7c58a 3842 return url if re.match(r'^(?:(?:https?|rt(?:m(?:pt?[es]?|fp)|sp[su]?)|mms|ftps?):)?//', url) else None
af03000a
S
3843
3844
e29663c6 3845def strftime_or_none(timestamp, date_format, default=None):
3846 datetime_object = None
3847 try:
3848 if isinstance(timestamp, compat_numeric_types): # unix timestamp
3849 datetime_object = datetime.datetime.utcfromtimestamp(timestamp)
3850 elif isinstance(timestamp, compat_str): # assume YYYYMMDD
3851 datetime_object = datetime.datetime.strptime(timestamp, '%Y%m%d')
3852 return datetime_object.strftime(date_format)
3853 except (ValueError, TypeError, AttributeError):
3854 return default
3855
3856
608d11f5 3857def parse_duration(s):
8f9312c3 3858 if not isinstance(s, compat_basestring):
608d11f5
PH
3859 return None
3860
ca7b3246
S
3861 s = s.strip()
3862
acaff495 3863 days, hours, mins, secs, ms = [None] * 5
15846398 3864 m = re.match(r'(?:(?:(?:(?P<days>[0-9]+):)?(?P<hours>[0-9]+):)?(?P<mins>[0-9]+):)?(?P<secs>[0-9]+)(?P<ms>\.[0-9]+)?Z?$', s)
acaff495 3865 if m:
3866 days, hours, mins, secs, ms = m.groups()
3867 else:
3868 m = re.match(
056653bb
S
3869 r'''(?ix)(?:P?
3870 (?:
3871 [0-9]+\s*y(?:ears?)?\s*
3872 )?
3873 (?:
3874 [0-9]+\s*m(?:onths?)?\s*
3875 )?
3876 (?:
3877 [0-9]+\s*w(?:eeks?)?\s*
3878 )?
8f4b58d7 3879 (?:
acaff495 3880 (?P<days>[0-9]+)\s*d(?:ays?)?\s*
8f4b58d7 3881 )?
056653bb 3882 T)?
acaff495 3883 (?:
3884 (?P<hours>[0-9]+)\s*h(?:ours?)?\s*
3885 )?
3886 (?:
3887 (?P<mins>[0-9]+)\s*m(?:in(?:ute)?s?)?\s*
3888 )?
3889 (?:
3890 (?P<secs>[0-9]+)(?P<ms>\.[0-9]+)?\s*s(?:ec(?:ond)?s?)?\s*
15846398 3891 )?Z?$''', s)
acaff495 3892 if m:
3893 days, hours, mins, secs, ms = m.groups()
3894 else:
15846398 3895 m = re.match(r'(?i)(?:(?P<hours>[0-9.]+)\s*(?:hours?)|(?P<mins>[0-9.]+)\s*(?:mins?\.?|minutes?)\s*)Z?$', s)
acaff495 3896 if m:
3897 hours, mins = m.groups()
3898 else:
3899 return None
3900
3901 duration = 0
3902 if secs:
3903 duration += float(secs)
3904 if mins:
3905 duration += float(mins) * 60
3906 if hours:
3907 duration += float(hours) * 60 * 60
3908 if days:
3909 duration += float(days) * 24 * 60 * 60
3910 if ms:
3911 duration += float(ms)
3912 return duration
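
# Illustrative usage sketch (added for exposition; not part of the original
# module). _parse_duration_examples is a hypothetical helper that is never
# called; the input strings are assumptions showing the formats handled above.
def _parse_duration_examples():
    assert parse_duration('3:45') == 225           # MM:SS
    assert parse_duration('1:02:03.5') == 3723.5   # HH:MM:SS.ms
    assert parse_duration('PT1H30M') == 5400       # ISO 8601 style
    assert parse_duration('2.5 hours') == 9000     # free-form text
    assert parse_duration('invalid') is None       # unparseable input
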
91d7d0b3
JMF
3913
3914
e65e4c88 3915def prepend_extension(filename, ext, expected_real_ext=None):
5f6a1245 3916 name, real_ext = os.path.splitext(filename)
e65e4c88
S
3917 return (
3918 '{0}.{1}{2}'.format(name, ext, real_ext)
3919 if not expected_real_ext or real_ext[1:] == expected_real_ext
3920 else '{0}.{1}'.format(filename, ext))
d70ad093
PH
3921
3922
b3ed15b7
S
3923def replace_extension(filename, ext, expected_real_ext=None):
3924 name, real_ext = os.path.splitext(filename)
3925 return '{0}.{1}'.format(
3926 name if not expected_real_ext or real_ext[1:] == expected_real_ext else filename,
3927 ext)
3928
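
# Illustrative usage sketch (added for exposition; not part of the original
# module). _extension_helper_examples is a hypothetical helper that is never
# called; the filenames are made up.
def _extension_helper_examples():
    # prepend_extension inserts the new extension before the real one
    assert prepend_extension('video.mp4', 'temp') == 'video.temp.mp4'
    # ...unless the real extension differs from the expected one
    assert prepend_extension('video.mkv', 'temp', expected_real_ext='mp4') == 'video.mkv.temp'
    # replace_extension swaps the extension outright
    assert replace_extension('video.mp4', 'webm') == 'video.webm'
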
3929
d70ad093
PH
3930def check_executable(exe, args=[]):
3931 """ Checks if the given binary is installed somewhere in PATH, and returns its name.
3932 args can be a list of arguments for a short output (like -version) """
3933 try:
f5b1bca9 3934 process_communicate_or_kill(subprocess.Popen(
3935 [exe] + args, stdout=subprocess.PIPE, stderr=subprocess.PIPE))
d70ad093
PH
3936 except OSError:
3937 return False
3938 return exe
b7ab0590
PH
3939
3940
95807118 3941def get_exe_version(exe, args=['--version'],
cae97f65 3942 version_re=None, unrecognized='present'):
95807118
PH
3943 """ Returns the version of the specified executable,
3944 or False if the executable is not present """
3945 try:
b64d04c1 3946 # STDIN should be redirected too. On UNIX-like systems, ffmpeg triggers
7a5c1cfe 3947 # SIGTTOU if yt-dlp is run in the background.
067aa17e 3948 # See https://github.com/ytdl-org/youtube-dl/issues/955#issuecomment-209789656
f5b1bca9 3949 out, _ = process_communicate_or_kill(subprocess.Popen(
54116803 3950 [encodeArgument(exe)] + args,
00ca7552 3951 stdin=subprocess.PIPE,
f5b1bca9 3952 stdout=subprocess.PIPE, stderr=subprocess.STDOUT))
95807118
PH
3953 except OSError:
3954 return False
cae97f65
PH
3955 if isinstance(out, bytes): # Python 2.x
3956 out = out.decode('ascii', 'ignore')
3957 return detect_exe_version(out, version_re, unrecognized)
3958
3959
3960def detect_exe_version(output, version_re=None, unrecognized='present'):
3961 assert isinstance(output, compat_str)
3962 if version_re is None:
3963 version_re = r'version\s+([-0-9._a-zA-Z]+)'
3964 m = re.search(version_re, output)
95807118
PH
3965 if m:
3966 return m.group(1)
3967 else:
3968 return unrecognized
3969
3970
cb89cfc1 3971class LazyList(collections.abc.Sequence):
483336e7 3972 ''' Lazy immutable list from an iterable
3973 Note that slices of a LazyList are plain lists and not LazyLists'''
3974
8e5fecc8 3975 class IndexError(IndexError):
3976 pass
3977
483336e7 3978 def __init__(self, iterable):
3979 self.__iterable = iter(iterable)
3980 self.__cache = []
28419ca2 3981 self.__reversed = False
483336e7 3982
3983 def __iter__(self):
28419ca2 3984 if self.__reversed:
3985 # We need to consume the entire iterable to iterate in reverse
981052c9 3986 yield from self.exhaust()
28419ca2 3987 return
3988 yield from self.__cache
483336e7 3989 for item in self.__iterable:
3990 self.__cache.append(item)
3991 yield item
3992
981052c9 3993 def __exhaust(self):
483336e7 3994 self.__cache.extend(self.__iterable)
28419ca2 3995 return self.__cache
3996
981052c9 3997 def exhaust(self):
3998 ''' Evaluate the entire iterable '''
3999 return self.__exhaust()[::-1 if self.__reversed else 1]
4000
28419ca2 4001 @staticmethod
981052c9 4002 def __reverse_index(x):
e0f2b4b4 4003 return None if x is None else -(x + 1)
483336e7 4004
4005 def __getitem__(self, idx):
4006 if isinstance(idx, slice):
28419ca2 4007 if self.__reversed:
e0f2b4b4 4008 idx = slice(self.__reverse_index(idx.start), self.__reverse_index(idx.stop), -(idx.step or 1))
4009 start, stop, step = idx.start, idx.stop, idx.step or 1
483336e7 4010 elif isinstance(idx, int):
28419ca2 4011 if self.__reversed:
981052c9 4012 idx = self.__reverse_index(idx)
e0f2b4b4 4013 start, stop, step = idx, idx, 0
483336e7 4014 else:
4015 raise TypeError('indices must be integers or slices')
e0f2b4b4 4016 if ((start or 0) < 0 or (stop or 0) < 0
4017 or (start is None and step < 0)
4018 or (stop is None and step > 0)):
483336e7 4019 # We need to consume the entire iterable to be able to slice from the end
4020 # Obviously, never use this with infinite iterables
8e5fecc8 4021 self.__exhaust()
4022 try:
4023 return self.__cache[idx]
4024 except IndexError as e:
4025 raise self.IndexError(e) from e
e0f2b4b4 4026 n = max(start or 0, stop or 0) - len(self.__cache) + 1
28419ca2 4027 if n > 0:
4028 self.__cache.extend(itertools.islice(self.__iterable, n))
8e5fecc8 4029 try:
4030 return self.__cache[idx]
4031 except IndexError as e:
4032 raise self.IndexError(e) from e
483336e7 4033
4034 def __bool__(self):
4035 try:
28419ca2 4036 self[-1] if self.__reversed else self[0]
8e5fecc8 4037 except self.IndexError:
483336e7 4038 return False
4039 return True
4040
4041 def __len__(self):
8e5fecc8 4042 self.__exhaust()
483336e7 4043 return len(self.__cache)
4044
981052c9 4045 def reverse(self):
28419ca2 4046 self.__reversed = not self.__reversed
4047 return self
4048
4049 def __repr__(self):
4050 # repr and str should mimic a list. So we exhaust the iterable
4051 return repr(self.exhaust())
4052
4053 def __str__(self):
4054 return repr(self.exhaust())
4055
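
# Illustrative usage sketch (added for exposition; not part of the original
# module). _lazy_list_examples is a hypothetical helper that is never called;
# it shows that LazyList only consumes its iterable as far as indexing requires.
def _lazy_list_examples():
    lst = LazyList(str(i) for i in range(10))
    assert lst[2] == '2'               # only the first three items are evaluated
    assert lst[:3] == ['0', '1', '2']  # slices are plain lists, not LazyLists
    assert len(lst) == 10              # __len__ exhausts the underlying iterable
    assert not LazyList(iter([]))      # an empty iterable is falsy
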
483336e7 4056
7be9ccff 4057class PagedList:
dd26ced1
PH
4058 def __len__(self):
4059 # This is only useful for tests
4060 return len(self.getslice())
4061
7be9ccff 4062 def __init__(self, pagefunc, pagesize, use_cache=True):
4063 self._pagefunc = pagefunc
4064 self._pagesize = pagesize
4065 self._use_cache = use_cache
4066 self._cache = {}
4067
4068 def getpage(self, pagenum):
4069 page_results = self._cache.get(pagenum) or list(self._pagefunc(pagenum))
4070 if self._use_cache:
4071 self._cache[pagenum] = page_results
4072 return page_results
4073
4074 def getslice(self, start=0, end=None):
4075 return list(self._getslice(start, end))
4076
4077 def _getslice(self, start, end):
55575225 4078 raise NotImplementedError('This method must be implemented by subclasses')
4079
4080 def __getitem__(self, idx):
7be9ccff 4081 # NOTE: cache must be enabled if this is used
55575225 4082 if not isinstance(idx, int) or idx < 0:
4083 raise TypeError('indices must be non-negative integers')
4084 entries = self.getslice(idx, idx + 1)
4085 return entries[0] if entries else None
4086
9c44d242
PH
4087
4088class OnDemandPagedList(PagedList):
7be9ccff 4089 def _getslice(self, start, end):
b7ab0590
PH
4090 for pagenum in itertools.count(start // self._pagesize):
4091 firstid = pagenum * self._pagesize
4092 nextfirstid = pagenum * self._pagesize + self._pagesize
4093 if start >= nextfirstid:
4094 continue
4095
b7ab0590
PH
4096 startv = (
4097 start % self._pagesize
4098 if firstid <= start < nextfirstid
4099 else 0)
b7ab0590
PH
4100 endv = (
4101 ((end - 1) % self._pagesize) + 1
4102 if (end is not None and firstid <= end <= nextfirstid)
4103 else None)
4104
7be9ccff 4105 page_results = self.getpage(pagenum)
b7ab0590
PH
4106 if startv != 0 or endv is not None:
4107 page_results = page_results[startv:endv]
7be9ccff 4108 yield from page_results
b7ab0590
PH
4109
4110 # A little optimization - if the current page is not "full", i.e. does
4111 # not contain page_size videos, then we can assume that this page
4112 # is the last one - there are no more ids on further pages -
4113 # i.e. no need to query again.
4114 if len(page_results) + startv < self._pagesize:
4115 break
4116
4117 # If we got the whole page, but the next page is not interesting,
4118 # break out early as well
4119 if end == nextfirstid:
4120 break
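
# Illustrative usage sketch (added for exposition; not part of the original
# module). fetch_page and _on_demand_paged_list_example are hypothetical;
# each page is assumed to hold five consecutive integers.
def _on_demand_paged_list_example():
    def fetch_page(pagenum):
        return range(pagenum * 5, pagenum * 5 + 5)
    pl = OnDemandPagedList(fetch_page, 5)
    assert pl.getslice(3, 7) == [3, 4, 5, 6]  # spans pages 0 and 1
    assert pl[12] == 12                       # only page 2 is fetched for this lookup
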
81c2f20b
PH
4121
4122
9c44d242
PH
4123class InAdvancePagedList(PagedList):
4124 def __init__(self, pagefunc, pagecount, pagesize):
9c44d242 4125 self._pagecount = pagecount
7be9ccff 4126 PagedList.__init__(self, pagefunc, pagesize, True)
9c44d242 4127
7be9ccff 4128 def _getslice(self, start, end):
9c44d242
PH
4129 start_page = start // self._pagesize
4130 end_page = (
4131 self._pagecount if end is None else (end // self._pagesize + 1))
4132 skip_elems = start - start_page * self._pagesize
4133 only_more = None if end is None else end - start
4134 for pagenum in range(start_page, end_page):
7be9ccff 4135 page_results = self.getpage(pagenum)
9c44d242 4136 if skip_elems:
7be9ccff 4137 page_results = page_results[skip_elems:]
9c44d242
PH
4138 skip_elems = None
4139 if only_more is not None:
7be9ccff 4140 if len(page_results) < only_more:
4141 only_more -= len(page_results)
9c44d242 4142 else:
7be9ccff 4143 yield from page_results[:only_more]
9c44d242 4144 break
7be9ccff 4145 yield from page_results
9c44d242
PH
4146
4147
81c2f20b 4148def uppercase_escape(s):
676eb3f2 4149 unicode_escape = codecs.getdecoder('unicode_escape')
81c2f20b 4150 return re.sub(
a612753d 4151 r'\\U[0-9a-fA-F]{8}',
676eb3f2
PH
4152 lambda m: unicode_escape(m.group(0))[0],
4153 s)
0fe2ff78
YCH
4154
4155
4156def lowercase_escape(s):
4157 unicode_escape = codecs.getdecoder('unicode_escape')
4158 return re.sub(
4159 r'\\u[0-9a-fA-F]{4}',
4160 lambda m: unicode_escape(m.group(0))[0],
4161 s)
b53466e1 4162
d05cfe06
S
4163
4164def escape_rfc3986(s):
4165 """Escape non-ASCII characters as suggested by RFC 3986"""
8f9312c3 4166 if sys.version_info < (3, 0) and isinstance(s, compat_str):
d05cfe06 4167 s = s.encode('utf-8')
ecc0c5ee 4168 return compat_urllib_parse.quote(s, b"%/;:@&=+$,!~*'()?#[]")
d05cfe06
S
4169
4170
4171def escape_url(url):
4172 """Escape URL as suggested by RFC 3986"""
4173 url_parsed = compat_urllib_parse_urlparse(url)
4174 return url_parsed._replace(
efbed08d 4175 netloc=url_parsed.netloc.encode('idna').decode('ascii'),
d05cfe06
S
4176 path=escape_rfc3986(url_parsed.path),
4177 params=escape_rfc3986(url_parsed.params),
4178 query=escape_rfc3986(url_parsed.query),
4179 fragment=escape_rfc3986(url_parsed.fragment)
4180 ).geturl()
4181
62e609ab 4182
4dfbf869 4183def parse_qs(url):
4184 return compat_parse_qs(compat_urllib_parse_urlparse(url).query)
4185
4186
62e609ab
PH
4187def read_batch_urls(batch_fd):
4188 def fixup(url):
4189 if not isinstance(url, compat_str):
4190 url = url.decode('utf-8', 'replace')
8c04f0be 4191 BOM_UTF8 = ('\xef\xbb\xbf', '\ufeff')
4192 for bom in BOM_UTF8:
4193 if url.startswith(bom):
4194 url = url[len(bom):]
4195 url = url.lstrip()
4196 if not url or url.startswith(('#', ';', ']')):
62e609ab 4197 return False
8c04f0be 4198 # "#" cannot be stripped out since it is part of the URI
4199 # However, it can be safely stripped out if following a whitespace
4200 return re.split(r'\s#', url, 1)[0].rstrip()
62e609ab
PH
4201
4202 with contextlib.closing(batch_fd) as fd:
4203 return [url for url in map(fixup, fd) if url]
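
# Illustrative usage sketch (added for exposition; not part of the original
# module). _read_batch_urls_example is a hypothetical helper; io.StringIO
# stands in for a real batch file and the URLs are made up.
def _read_batch_urls_example():
    batch = io.StringIO(
        '\ufeffhttps://example.com/a\n'
        '# a comment line\n'
        'https://example.com/b  # trailing comment\n'
        '\n')
    assert read_batch_urls(batch) == ['https://example.com/a', 'https://example.com/b']
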
b74fa8cd
JMF
4204
4205
4206def urlencode_postdata(*args, **kargs):
15707c7e 4207 return compat_urllib_parse_urlencode(*args, **kargs).encode('ascii')
bcf89ce6
PH
4208
4209
38f9ef31 4210def update_url_query(url, query):
cacd9966
YCH
4211 if not query:
4212 return url
38f9ef31 4213 parsed_url = compat_urlparse.urlparse(url)
4214 qs = compat_parse_qs(parsed_url.query)
4215 qs.update(query)
4216 return compat_urlparse.urlunparse(parsed_url._replace(
15707c7e 4217 query=compat_urllib_parse_urlencode(qs, True)))
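
# Illustrative usage sketch (added for exposition; not part of the original
# module). _update_url_query_example is a hypothetical helper; the URL is
# made up.
def _update_url_query_example():
    url = 'https://example.com/path?a=1'
    assert update_url_query(url, {}) == url  # empty query is a no-op
    assert update_url_query(url, {'b': '2'}) == 'https://example.com/path?a=1&b=2'
    assert update_url_query(url, {'a': '3'}) == 'https://example.com/path?a=3'
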
16392824 4218
8e60dc75 4219
ed0291d1
S
4220def update_Request(req, url=None, data=None, headers={}, query={}):
4221 req_headers = req.headers.copy()
4222 req_headers.update(headers)
4223 req_data = data or req.data
4224 req_url = update_url_query(url or req.get_full_url(), query)
95cf60e8
S
4225 req_get_method = req.get_method()
4226 if req_get_method == 'HEAD':
4227 req_type = HEADRequest
4228 elif req_get_method == 'PUT':
4229 req_type = PUTRequest
4230 else:
4231 req_type = compat_urllib_request.Request
ed0291d1
S
4232 new_req = req_type(
4233 req_url, data=req_data, headers=req_headers,
4234 origin_req_host=req.origin_req_host, unverifiable=req.unverifiable)
4235 if hasattr(req, 'timeout'):
4236 new_req.timeout = req.timeout
4237 return new_req
4238
4239
10c87c15 4240def _multipart_encode_impl(data, boundary):
0c265486
YCH
4241 content_type = 'multipart/form-data; boundary=%s' % boundary
4242
4243 out = b''
4244 for k, v in data.items():
4245 out += b'--' + boundary.encode('ascii') + b'\r\n'
4246 if isinstance(k, compat_str):
4247 k = k.encode('utf-8')
4248 if isinstance(v, compat_str):
4249 v = v.encode('utf-8')
4250 # RFC 2047 requires non-ASCII field names to be encoded, while RFC 7578
4251 # suggests sending UTF-8 directly. Firefox sends UTF-8, too
b2ad479d 4252 content = b'Content-Disposition: form-data; name="' + k + b'"\r\n\r\n' + v + b'\r\n'
0c265486
YCH
4253 if boundary.encode('ascii') in content:
4254 raise ValueError('Boundary overlaps with data')
4255 out += content
4256
4257 out += b'--' + boundary.encode('ascii') + b'--\r\n'
4258
4259 return out, content_type
4260
4261
4262def multipart_encode(data, boundary=None):
4263 '''
4264 Encode a dict to RFC 7578-compliant form-data
4265
4266 data:
4267 A dict where keys and values can be either Unicode or bytes-like
4268 objects.
4269 boundary:
4270 If specified, it must be a Unicode object and is used as the boundary. Otherwise
4271 a random boundary is generated.
4272
4273 Reference: https://tools.ietf.org/html/rfc7578
4274 '''
4275 has_specified_boundary = boundary is not None
4276
4277 while True:
4278 if boundary is None:
4279 boundary = '---------------' + str(random.randrange(0x0fffffff, 0xffffffff))
4280
4281 try:
10c87c15 4282 out, content_type = _multipart_encode_impl(data, boundary)
0c265486
YCH
4283 break
4284 except ValueError:
4285 if has_specified_boundary:
4286 raise
4287 boundary = None
4288
4289 return out, content_type
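
# Illustrative usage sketch (added for exposition; not part of the original
# module). _multipart_encode_example is a hypothetical helper; field name,
# value and boundary are made up (a fixed boundary keeps the output deterministic).
def _multipart_encode_example():
    body, content_type = multipart_encode({'field': 'value'}, boundary='XXX')
    assert content_type == 'multipart/form-data; boundary=XXX'
    assert body == (b'--XXX\r\n'
                    b'Content-Disposition: form-data; name="field"\r\n\r\n'
                    b'value\r\n'
                    b'--XXX--\r\n')
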
4290
4291
86296ad2 4292def dict_get(d, key_or_keys, default=None, skip_false_values=True):
cbecc9b9
S
4293 if isinstance(key_or_keys, (list, tuple)):
4294 for key in key_or_keys:
86296ad2
S
4295 if key not in d or d[key] is None or skip_false_values and not d[key]:
4296 continue
4297 return d[key]
cbecc9b9
S
4298 return default
4299 return d.get(key_or_keys, default)
4300
4301
329ca3be 4302def try_get(src, getter, expected_type=None):
6606817a 4303 for get in variadic(getter):
a32a9a7e
S
4304 try:
4305 v = get(src)
4306 except (AttributeError, KeyError, TypeError, IndexError):
4307 pass
4308 else:
4309 if expected_type is None or isinstance(v, expected_type):
4310 return v
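
# Illustrative usage sketch (added for exposition; not part of the original
# module). _try_get_and_dict_get_examples is a hypothetical helper; the
# metadata dict is made up.
def _try_get_and_dict_get_examples():
    info = {'title': 'demo', 'count': 0, 'nested': {'id': 42}}
    # dict_get skips falsy values by default and accepts a tuple of candidate keys
    assert dict_get(info, ('missing', 'count', 'title')) == 'demo'
    assert dict_get(info, ('missing', 'count'), skip_false_values=False) == 0
    # try_get swallows lookup errors and can enforce an expected type
    assert try_get(info, lambda x: x['nested']['id'], int) == 42
    assert try_get(info, lambda x: x['nested']['missing']) is None
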
329ca3be
S
4311
4312
6cc62232
S
4313def merge_dicts(*dicts):
4314 merged = {}
4315 for a_dict in dicts:
4316 for k, v in a_dict.items():
4317 if v is None:
4318 continue
3089bc74
S
4319 if (k not in merged
4320 or (isinstance(v, compat_str) and v
4321 and isinstance(merged[k], compat_str)
4322 and not merged[k])):
6cc62232
S
4323 merged[k] = v
4324 return merged
4325
4326
8e60dc75
S
4327def encode_compat_str(string, encoding=preferredencoding(), errors='strict'):
4328 return string if isinstance(string, compat_str) else compat_str(string, encoding, errors)
4329
16392824 4330
a1a530b0
PH
4331US_RATINGS = {
4332 'G': 0,
4333 'PG': 10,
4334 'PG-13': 13,
4335 'R': 16,
4336 'NC': 18,
4337}
fac55558
PH
4338
4339
a8795327 4340TV_PARENTAL_GUIDELINES = {
5a16c9d9
RA
4341 'TV-Y': 0,
4342 'TV-Y7': 7,
4343 'TV-G': 0,
4344 'TV-PG': 0,
4345 'TV-14': 14,
4346 'TV-MA': 17,
a8795327
S
4347}
4348
4349
146c80e2 4350def parse_age_limit(s):
a8795327
S
4351 if type(s) == int:
4352 return s if 0 <= s <= 21 else None
4353 if not isinstance(s, compat_basestring):
d838b1bd 4354 return None
146c80e2 4355 m = re.match(r'^(?P<age>\d{1,2})\+?$', s)
a8795327
S
4356 if m:
4357 return int(m.group('age'))
5c5fae6d 4358 s = s.upper()
a8795327
S
4359 if s in US_RATINGS:
4360 return US_RATINGS[s]
5a16c9d9 4361 m = re.match(r'^TV[_-]?(%s)$' % '|'.join(k[3:] for k in TV_PARENTAL_GUIDELINES), s)
b8361187 4362 if m:
5a16c9d9 4363 return TV_PARENTAL_GUIDELINES['TV-' + m.group(1)]
b8361187 4364 return None
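
# Illustrative usage sketch (added for exposition; not part of the original
# module). _parse_age_limit_examples is a hypothetical helper showing the
# accepted rating formats.
def _parse_age_limit_examples():
    assert parse_age_limit(18) == 18       # integers in 0..21 pass through
    assert parse_age_limit('16+') == 16    # "NN+" style strings
    assert parse_age_limit('PG-13') == 13  # US movie ratings
    assert parse_age_limit('TV-MA') == 17  # US TV parental guidelines
    assert parse_age_limit('unknown') is None
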
146c80e2
S
4365
4366
fac55558 4367def strip_jsonp(code):
609a61e3 4368 return re.sub(
5552c9eb 4369 r'''(?sx)^
e9c671d5 4370 (?:window\.)?(?P<func_name>[a-zA-Z0-9_.$]*)
5552c9eb
YCH
4371 (?:\s*&&\s*(?P=func_name))?
4372 \s*\(\s*(?P<callback_data>.*)\);?
4373 \s*?(?://[^\n]*)*$''',
4374 r'\g<callback_data>', code)
478c2c61
PH
4375
4376
5c610515 4377def js_to_json(code, vars={}):
4378 # vars is a dict of var, val pairs to substitute
c843e685 4379 COMMENT_RE = r'/\*(?:(?!\*/).)*?\*/|//[^\n]*\n'
4195096e
S
4380 SKIP_RE = r'\s*(?:{comment})?\s*'.format(comment=COMMENT_RE)
4381 INTEGER_TABLE = (
4382 (r'(?s)^(0[xX][0-9a-fA-F]+){skip}:?$'.format(skip=SKIP_RE), 16),
4383 (r'(?s)^(0+[0-7]+){skip}:?$'.format(skip=SKIP_RE), 8),
4384 )
4385
e05f6939 4386 def fix_kv(m):
e7b6d122
PH
4387 v = m.group(0)
4388 if v in ('true', 'false', 'null'):
4389 return v
421ddcb8
C
4390 elif v in ('undefined', 'void 0'):
4391 return 'null'
8bdd16b4 4392 elif v.startswith('/*') or v.startswith('//') or v.startswith('!') or v == ',':
bd1e4844 4393 return ""
4394
4395 if v[0] in ("'", '"'):
4396 v = re.sub(r'(?s)\\.|"', lambda m: {
e7b6d122 4397 '"': '\\"',
bd1e4844 4398 "\\'": "'",
4399 '\\\n': '',
4400 '\\x': '\\u00',
4401 }.get(m.group(0), m.group(0)), v[1:-1])
8bdd16b4 4402 else:
4403 for regex, base in INTEGER_TABLE:
4404 im = re.match(regex, v)
4405 if im:
4406 i = int(im.group(1), base)
4407 return '"%d":' % i if v.endswith(':') else '%d' % i
89ac4a19 4408
5c610515 4409 if v in vars:
4410 return vars[v]
4411
e7b6d122 4412 return '"%s"' % v
e05f6939 4413
bd1e4844 4414 return re.sub(r'''(?sx)
4415 "(?:[^"\\]*(?:\\\\|\\['"nurtbfx/\n]))*[^"\\]*"|
4416 '(?:[^'\\]*(?:\\\\|\\['"nurtbfx/\n]))*[^'\\]*'|
4195096e 4417 {comment}|,(?={skip}[\]}}])|
421ddcb8 4418 void\s0|(?:(?<![0-9])[eE]|[a-df-zA-DF-Z_$])[.a-zA-Z_$0-9]*|
4195096e 4419 \b(?:0[xX][0-9a-fA-F]+|0+[0-7]+)(?:{skip}:)?|
8bdd16b4 4420 [0-9]+(?={skip}:)|
4421 !+
4195096e 4422 '''.format(comment=COMMENT_RE, skip=SKIP_RE), fix_kv, code)
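
# Illustrative usage sketch (added for exposition; not part of the original
# module). _js_to_json_examples is a hypothetical helper; the object literals
# are made-up samples of the JavaScript-ish data extractors scrape from pages.
def _js_to_json_examples():
    assert json.loads(js_to_json("{a: 1, 'b': \"two\", c: true}")) == {'a': 1, 'b': 'two', 'c': True}
    assert json.loads(js_to_json('{"count": 0x10}')) == {'count': 16}
    assert json.loads(js_to_json('{x: undefined, /* comment */ y: 2,}')) == {'x': None, 'y': 2}
    # vars maps bare identifiers to replacement (already JSON-encoded) values
    assert json.loads(js_to_json('{duration: duration_sec}', {'duration_sec': '120'})) == {'duration': 120}
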
e05f6939
PH
4423
4424
478c2c61
PH
4425def qualities(quality_ids):
4426 """ Get a numeric quality value out of a list of possible values """
4427 def q(qid):
4428 try:
4429 return quality_ids.index(qid)
4430 except ValueError:
4431 return -1
4432 return q
4433
acd69589 4434
de6000d9 4435DEFAULT_OUTTMPL = {
4436 'default': '%(title)s [%(id)s].%(ext)s',
72755351 4437 'chapter': '%(title)s - %(section_number)03d %(section_title)s [%(id)s].%(ext)s',
de6000d9 4438}
4439OUTTMPL_TYPES = {
72755351 4440 'chapter': None,
de6000d9 4441 'subtitle': None,
4442 'thumbnail': None,
4443 'description': 'description',
4444 'annotation': 'annotations.xml',
4445 'infojson': 'info.json',
5112f26a 4446 'pl_thumbnail': None,
de6000d9 4447 'pl_description': 'description',
4448 'pl_infojson': 'info.json',
4449}
0a871f68 4450
143db31d 4451# As of [1] format syntax is:
4452# %[mapping_key][conversion_flags][minimum_width][.precision][length_modifier]type
4453# 1. https://docs.python.org/2/library/stdtypes.html#string-formatting
901130bb 4454STR_FORMAT_RE_TMPL = r'''(?x)
4455 (?<!%)(?P<prefix>(?:%%)*)
143db31d 4456 %
752cda38 4457 (?P<has_key>\((?P<key>{0})\))? # mapping key
4458 (?P<format>
4459 (?:[#0\-+ ]+)? # conversion flags (optional)
4460 (?:\d+)? # minimum field width (optional)
4461 (?:\.\d+)? # precision (optional)
4462 [hlL]? # length modifier (optional)
901130bb 4463 {1} # conversion type
752cda38 4464 )
143db31d 4465'''
4466
7d1eb38a 4467
901130bb 4468STR_FORMAT_TYPES = 'diouxXeEfFgGcrs'
a020a0dc 4469
7d1eb38a 4470
a020a0dc
PH
4471def limit_length(s, length):
4472 """ Add ellipses to overly long strings """
4473 if s is None:
4474 return None
4475 ELLIPSES = '...'
4476 if len(s) > length:
4477 return s[:length - len(ELLIPSES)] + ELLIPSES
4478 return s
48844745
PH
4479
4480
4481def version_tuple(v):
5f9b8394 4482 return tuple(int(e) for e in re.split(r'[-.]', v))
48844745
PH
4483
4484
4485def is_outdated_version(version, limit, assume_new=True):
4486 if not version:
4487 return not assume_new
4488 try:
4489 return version_tuple(version) < version_tuple(limit)
4490 except ValueError:
4491 return not assume_new
732ea2f0
PH
4492
4493
4494def ytdl_is_updateable():
7a5c1cfe 4495 """ Returns whether yt-dlp can be updated with -U """
735d865e 4496 return False
4497
732ea2f0
PH
4498 from zipimport import zipimporter
4499
4500 return isinstance(globals().get('__loader__'), zipimporter) or hasattr(sys, 'frozen')
7d4111ed
PH
4501
4502
4503def args_to_str(args):
4504 # Get a short string representation for a subprocess command
702ccf2d 4505 return ' '.join(compat_shlex_quote(a) for a in args)
2ccd1b10
PH
4506
4507
9b9c5355 4508def error_to_compat_str(err):
fdae2358
S
4509 err_str = str(err)
4510 # On Python 2 the error byte string must be decoded with the proper
4511 # encoding rather than ascii
4512 if sys.version_info[0] < 3:
4513 err_str = err_str.decode(preferredencoding())
4514 return err_str
4515
4516
c460bdd5 4517def mimetype2ext(mt):
eb9ee194
S
4518 if mt is None:
4519 return None
4520
765ac263
JMF
4521 ext = {
4522 'audio/mp4': 'm4a',
6c33d24b
YCH
4523 # Per RFC 3003, audio/mpeg can be .mp1, .mp2 or .mp3. Here use .mp3 as
4524 # it's the most popular one
4525 'audio/mpeg': 'mp3',
ba39289d 4526 'audio/x-wav': 'wav',
765ac263
JMF
4527 }.get(mt)
4528 if ext is not None:
4529 return ext
4530
c460bdd5 4531 _, _, res = mt.rpartition('/')
6562d34a 4532 res = res.split(';')[0].strip().lower()
c460bdd5
PH
4533
4534 return {
f6861ec9 4535 '3gpp': '3gp',
cafcf657 4536 'smptett+xml': 'tt',
cafcf657 4537 'ttaf+xml': 'dfxp',
a0d8d704 4538 'ttml+xml': 'ttml',
f6861ec9 4539 'x-flv': 'flv',
a0d8d704 4540 'x-mp4-fragmented': 'mp4',
d4f05d47 4541 'x-ms-sami': 'sami',
a0d8d704 4542 'x-ms-wmv': 'wmv',
b4173f15
RA
4543 'mpegurl': 'm3u8',
4544 'x-mpegurl': 'm3u8',
4545 'vnd.apple.mpegurl': 'm3u8',
4546 'dash+xml': 'mpd',
b4173f15 4547 'f4m+xml': 'f4m',
f164b971 4548 'hds+xml': 'f4m',
e910fe2f 4549 'vnd.ms-sstr+xml': 'ism',
c2b2c7e1 4550 'quicktime': 'mov',
98ce1a3f 4551 'mp2t': 'ts',
39e7107d 4552 'x-wav': 'wav',
c460bdd5
PH
4553 }.get(res, res)
4554
4555
4f3c5e06 4556def parse_codecs(codecs_str):
4557 # http://tools.ietf.org/html/rfc6381
4558 if not codecs_str:
4559 return {}
a0566bbf 4560 split_codecs = list(filter(None, map(
dbf5416a 4561 str.strip, codecs_str.strip().strip(',').split(','))))
4f3c5e06 4562 vcodec, acodec = None, None
a0566bbf 4563 for full_codec in split_codecs:
4f3c5e06 4564 codec = full_codec.split('.')[0]
28cc2241 4565 if codec in ('avc1', 'avc2', 'avc3', 'avc4', 'vp9', 'vp8', 'hev1', 'hev2', 'h263', 'h264', 'mp4v', 'hvc1', 'av01', 'theora'):
4f3c5e06 4566 if not vcodec:
4567 vcodec = full_codec
60f5c9fb 4568 elif codec in ('mp4a', 'opus', 'vorbis', 'mp3', 'aac', 'ac-3', 'ec-3', 'eac3', 'dtsc', 'dtse', 'dtsh', 'dtsl'):
4f3c5e06 4569 if not acodec:
4570 acodec = full_codec
4571 else:
60f5c9fb 4572 write_string('WARNING: Unknown codec %s\n' % full_codec, sys.stderr)
4f3c5e06 4573 if not vcodec and not acodec:
a0566bbf 4574 if len(split_codecs) == 2:
4f3c5e06 4575 return {
a0566bbf 4576 'vcodec': split_codecs[0],
4577 'acodec': split_codecs[1],
4f3c5e06 4578 }
4579 else:
4580 return {
4581 'vcodec': vcodec or 'none',
4582 'acodec': acodec or 'none',
4583 }
4584 return {}
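
# Illustrative usage sketch (added for exposition; not part of the original
# module). _parse_codecs_examples is a hypothetical helper; the codec strings
# are typical DASH/HLS manifest values.
def _parse_codecs_examples():
    assert parse_codecs('avc1.64001f, mp4a.40.2') == {'vcodec': 'avc1.64001f', 'acodec': 'mp4a.40.2'}
    assert parse_codecs('vp9') == {'vcodec': 'vp9', 'acodec': 'none'}
    assert parse_codecs('') == {}
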
4585
4586
2ccd1b10 4587def urlhandle_detect_ext(url_handle):
79298173 4588 getheader = url_handle.headers.get
2ccd1b10 4589
b55ee18f
PH
4590 cd = getheader('Content-Disposition')
4591 if cd:
4592 m = re.match(r'attachment;\s*filename="(?P<filename>[^"]+)"', cd)
4593 if m:
4594 e = determine_ext(m.group('filename'), default_ext=None)
4595 if e:
4596 return e
4597
c460bdd5 4598 return mimetype2ext(getheader('Content-Type'))
05900629
PH
4599
4600
1e399778
YCH
4601def encode_data_uri(data, mime_type):
4602 return 'data:%s;base64,%s' % (mime_type, base64.b64encode(data).decode('ascii'))
4603
4604
05900629 4605def age_restricted(content_limit, age_limit):
6ec6cb4e 4606 """ Returns True iff the content should be blocked """
05900629
PH
4607
4608 if age_limit is None: # No limit set
4609 return False
4610 if content_limit is None:
4611 return False # Content available for everyone
4612 return age_limit < content_limit
61ca9a80
PH
4613
4614
4615def is_html(first_bytes):
4616 """ Detect whether a file contains HTML by examining its first bytes. """
4617
4618 BOMS = [
4619 (b'\xef\xbb\xbf', 'utf-8'),
4620 (b'\x00\x00\xfe\xff', 'utf-32-be'),
4621 (b'\xff\xfe\x00\x00', 'utf-32-le'),
4622 (b'\xff\xfe', 'utf-16-le'),
4623 (b'\xfe\xff', 'utf-16-be'),
4624 ]
4625 for bom, enc in BOMS:
4626 if first_bytes.startswith(bom):
4627 s = first_bytes[len(bom):].decode(enc, 'replace')
4628 break
4629 else:
4630 s = first_bytes.decode('utf-8', 'replace')
4631
4632 return re.match(r'^\s*<', s)
a055469f
PH
4633
4634
4635def determine_protocol(info_dict):
4636 protocol = info_dict.get('protocol')
4637 if protocol is not None:
4638 return protocol
4639
4640 url = info_dict['url']
4641 if url.startswith('rtmp'):
4642 return 'rtmp'
4643 elif url.startswith('mms'):
4644 return 'mms'
4645 elif url.startswith('rtsp'):
4646 return 'rtsp'
4647
4648 ext = determine_ext(url)
4649 if ext == 'm3u8':
4650 return 'm3u8'
4651 elif ext == 'f4m':
4652 return 'f4m'
4653
4654 return compat_urllib_parse_urlparse(url).scheme
cfb56d1a
PH
4655
4656
76d321f6 4657def render_table(header_row, data, delim=False, extraGap=0, hideEmpty=False):
cfb56d1a 4658 """ Render a list of rows, each as a list of values """
76d321f6 4659
4660 def get_max_lens(table):
4661 return [max(len(compat_str(v)) for v in col) for col in zip(*table)]
4662
4663 def filter_using_list(row, filterArray):
4664 return [col for (take, col) in zip(filterArray, row) if take]
4665
4666 if hideEmpty:
4667 max_lens = get_max_lens(data)
4668 header_row = filter_using_list(header_row, max_lens)
4669 data = [filter_using_list(row, max_lens) for row in data]
4670
cfb56d1a 4671 table = [header_row] + data
76d321f6 4672 max_lens = get_max_lens(table)
4673 if delim:
4674 table = [header_row] + [['-' * ml for ml in max_lens]] + data
4675 format_str = ' '.join('%-' + compat_str(ml + extraGap) + 's' for ml in max_lens[:-1]) + ' %s'
cfb56d1a 4676 return '\n'.join(format_str % tuple(row) for row in table)
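
# Illustrative usage sketch (added for exposition; not part of the original
# module). _render_table_example is a hypothetical helper; the rows are made
# up. Each column is padded to the width of its longest value.
def _render_table_example():
    assert render_table(['ID', 'EXT'], [['22', 'mp4'], ['251', 'webm']]) == (
        'ID  EXT\n'
        '22  mp4\n'
        '251 webm')
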
347de493
PH
4677
4678
8f18aca8 4679def _match_one(filter_part, dct, incomplete):
77b87f05 4680 # TODO: Generalize code with YoutubeDL._build_format_filter
a047eeb6 4681 STRING_OPERATORS = {
4682 '*=': operator.contains,
4683 '^=': lambda attr, value: attr.startswith(value),
4684 '$=': lambda attr, value: attr.endswith(value),
4685 '~=': lambda attr, value: re.search(value, attr),
4686 }
347de493 4687 COMPARISON_OPERATORS = {
a047eeb6 4688 **STRING_OPERATORS,
4689 '<=': operator.le, # "<=" must be defined above "<"
347de493 4690 '<': operator.lt,
347de493 4691 '>=': operator.ge,
a047eeb6 4692 '>': operator.gt,
347de493 4693 '=': operator.eq,
347de493 4694 }
a047eeb6 4695
347de493
PH
4696 operator_rex = re.compile(r'''(?x)\s*
4697 (?P<key>[a-z_]+)
77b87f05 4698 \s*(?P<negation>!\s*)?(?P<op>%s)(?P<none_inclusive>\s*\?)?\s*
347de493
PH
4699 (?:
4700 (?P<intval>[0-9.]+(?:[kKmMgGtTpPeEzZyY]i?[Bb]?)?)|
a047eeb6 4701 (?P<quote>["\'])(?P<quotedstrval>.+?)(?P=quote)|
4702 (?P<strval>.+?)
347de493
PH
4703 )
4704 \s*$
4705 ''' % '|'.join(map(re.escape, COMPARISON_OPERATORS.keys())))
4706 m = operator_rex.search(filter_part)
4707 if m:
77b87f05
MT
4708 unnegated_op = COMPARISON_OPERATORS[m.group('op')]
4709 if m.group('negation'):
4710 op = lambda attr, value: not unnegated_op(attr, value)
4711 else:
4712 op = unnegated_op
e5a088dc 4713 actual_value = dct.get(m.group('key'))
3089bc74
S
4714 if (m.group('quotedstrval') is not None
4715 or m.group('strval') is not None
e5a088dc
S
4716 # If the original field is a string and matching comparisonvalue is
4717 # a number we should respect the origin of the original field
4718 # and process comparison value as a string (see
067aa17e 4719 # https://github.com/ytdl-org/youtube-dl/issues/11082).
3089bc74
S
4720 or actual_value is not None and m.group('intval') is not None
4721 and isinstance(actual_value, compat_str)):
db13c16e
S
4722 comparison_value = m.group('quotedstrval') or m.group('strval') or m.group('intval')
4723 quote = m.group('quote')
4724 if quote is not None:
4725 comparison_value = comparison_value.replace(r'\%s' % quote, quote)
347de493 4726 else:
a047eeb6 4727 if m.group('op') in STRING_OPERATORS:
4728 raise ValueError('Operator %s only supports string values!' % m.group('op'))
347de493
PH
4729 try:
4730 comparison_value = int(m.group('intval'))
4731 except ValueError:
4732 comparison_value = parse_filesize(m.group('intval'))
4733 if comparison_value is None:
4734 comparison_value = parse_filesize(m.group('intval') + 'B')
4735 if comparison_value is None:
4736 raise ValueError(
4737 'Invalid integer value %r in filter part %r' % (
4738 m.group('intval'), filter_part))
347de493 4739 if actual_value is None:
8f18aca8 4740 return incomplete or m.group('none_inclusive')
347de493
PH
4741 return op(actual_value, comparison_value)
4742
4743 UNARY_OPERATORS = {
1cc47c66
S
4744 '': lambda v: (v is True) if isinstance(v, bool) else (v is not None),
4745 '!': lambda v: (v is False) if isinstance(v, bool) else (v is None),
347de493
PH
4746 }
4747 operator_rex = re.compile(r'''(?x)\s*
4748 (?P<op>%s)\s*(?P<key>[a-z_]+)
4749 \s*$
4750 ''' % '|'.join(map(re.escape, UNARY_OPERATORS.keys())))
4751 m = operator_rex.search(filter_part)
4752 if m:
4753 op = UNARY_OPERATORS[m.group('op')]
4754 actual_value = dct.get(m.group('key'))
8f18aca8 4755 if incomplete and actual_value is None:
4756 return True
347de493
PH
4757 return op(actual_value)
4758
4759 raise ValueError('Invalid filter part %r' % filter_part)
4760
4761
8f18aca8 4762def match_str(filter_str, dct, incomplete=False):
4763 """ Filter a dictionary with a simple string syntax. Returns True (=passes filter) or false
4764 When incomplete, all conditions passes on missing fields
4765 """
347de493 4766 return all(
8f18aca8 4767 _match_one(filter_part.replace(r'\&', '&'), dct, incomplete)
a047eeb6 4768 for filter_part in re.split(r'(?<!\\)&', filter_str))
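
# Illustrative usage sketch (added for exposition; not part of the original
# module). _match_str_examples is a hypothetical helper; the metadata dict is
# made up and the filters use the syntax parsed by _match_one above.
def _match_str_examples():
    info = {'duration': 450, 'like_count': 100, 'description': 'official video'}
    assert match_str('duration > 300', info)
    assert not match_str('like_count > 100 & duration < 600', info)
    assert match_str('description*=official', info)  # substring match
    assert match_str('!is_live', info)               # unset field passes "!"
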
347de493
PH
4769
4770
4771def match_filter_func(filter_str):
8f18aca8 4772 def _match_func(info_dict, *args, **kwargs):
4773 if match_str(filter_str, info_dict, *args, **kwargs):
347de493
PH
4774 return None
4775 else:
4776 video_title = info_dict.get('title', info_dict.get('id', 'video'))
4777 return '%s does not pass filter %s, skipping ..' % (video_title, filter_str)
4778 return _match_func
91410c9b
PH
4779
4780
bf6427d2
YCH
4781def parse_dfxp_time_expr(time_expr):
4782 if not time_expr:
d631d5f9 4783 return
bf6427d2
YCH
4784
4785 mobj = re.match(r'^(?P<time_offset>\d+(?:\.\d+)?)s?$', time_expr)
4786 if mobj:
4787 return float(mobj.group('time_offset'))
4788
db2fe38b 4789 mobj = re.match(r'^(\d+):(\d\d):(\d\d(?:(?:\.|:)\d+)?)$', time_expr)
bf6427d2 4790 if mobj:
db2fe38b 4791 return 3600 * int(mobj.group(1)) + 60 * int(mobj.group(2)) + float(mobj.group(3).replace(':', '.'))
bf6427d2
YCH
4792
4793
c1c924ab
YCH
4794def srt_subtitles_timecode(seconds):
4795 return '%02d:%02d:%02d,%03d' % (seconds / 3600, (seconds % 3600) / 60, seconds % 60, (seconds % 1) * 1000)
bf6427d2
YCH
4796
4797
4798def dfxp2srt(dfxp_data):
3869028f
YCH
4799 '''
4800 @param dfxp_data A bytes-like object containing DFXP data
4801 @returns A unicode object containing converted SRT data
4802 '''
5b995f71 4803 LEGACY_NAMESPACES = (
3869028f
YCH
4804 (b'http://www.w3.org/ns/ttml', [
4805 b'http://www.w3.org/2004/11/ttaf1',
4806 b'http://www.w3.org/2006/04/ttaf1',
4807 b'http://www.w3.org/2006/10/ttaf1',
5b995f71 4808 ]),
3869028f
YCH
4809 (b'http://www.w3.org/ns/ttml#styling', [
4810 b'http://www.w3.org/ns/ttml#style',
5b995f71
RA
4811 ]),
4812 )
4813
4814 SUPPORTED_STYLING = [
4815 'color',
4816 'fontFamily',
4817 'fontSize',
4818 'fontStyle',
4819 'fontWeight',
4820 'textDecoration'
4821 ]
4822
4e335771 4823 _x = functools.partial(xpath_with_ns, ns_map={
261f4730 4824 'xml': 'http://www.w3.org/XML/1998/namespace',
4e335771 4825 'ttml': 'http://www.w3.org/ns/ttml',
5b995f71 4826 'tts': 'http://www.w3.org/ns/ttml#styling',
4e335771 4827 })
bf6427d2 4828
5b995f71
RA
4829 styles = {}
4830 default_style = {}
4831
87de7069 4832 class TTMLPElementParser(object):
5b995f71
RA
4833 _out = ''
4834 _unclosed_elements = []
4835 _applied_styles = []
bf6427d2 4836
2b14cb56 4837 def start(self, tag, attrib):
5b995f71
RA
4838 if tag in (_x('ttml:br'), 'br'):
4839 self._out += '\n'
4840 else:
4841 unclosed_elements = []
4842 style = {}
4843 element_style_id = attrib.get('style')
4844 if default_style:
4845 style.update(default_style)
4846 if element_style_id:
4847 style.update(styles.get(element_style_id, {}))
4848 for prop in SUPPORTED_STYLING:
4849 prop_val = attrib.get(_x('tts:' + prop))
4850 if prop_val:
4851 style[prop] = prop_val
4852 if style:
4853 font = ''
4854 for k, v in sorted(style.items()):
4855 if self._applied_styles and self._applied_styles[-1].get(k) == v:
4856 continue
4857 if k == 'color':
4858 font += ' color="%s"' % v
4859 elif k == 'fontSize':
4860 font += ' size="%s"' % v
4861 elif k == 'fontFamily':
4862 font += ' face="%s"' % v
4863 elif k == 'fontWeight' and v == 'bold':
4864 self._out += '<b>'
4865 unclosed_elements.append('b')
4866 elif k == 'fontStyle' and v == 'italic':
4867 self._out += '<i>'
4868 unclosed_elements.append('i')
4869 elif k == 'textDecoration' and v == 'underline':
4870 self._out += '<u>'
4871 unclosed_elements.append('u')
4872 if font:
4873 self._out += '<font' + font + '>'
4874 unclosed_elements.append('font')
4875 applied_style = {}
4876 if self._applied_styles:
4877 applied_style.update(self._applied_styles[-1])
4878 applied_style.update(style)
4879 self._applied_styles.append(applied_style)
4880 self._unclosed_elements.append(unclosed_elements)
bf6427d2 4881
2b14cb56 4882 def end(self, tag):
5b995f71
RA
4883 if tag not in (_x('ttml:br'), 'br'):
4884 unclosed_elements = self._unclosed_elements.pop()
4885 for element in reversed(unclosed_elements):
4886 self._out += '</%s>' % element
4887 if unclosed_elements and self._applied_styles:
4888 self._applied_styles.pop()
bf6427d2 4889
2b14cb56 4890 def data(self, data):
5b995f71 4891 self._out += data
2b14cb56 4892
4893 def close(self):
5b995f71 4894 return self._out.strip()
2b14cb56 4895
4896 def parse_node(node):
4897 target = TTMLPElementParser()
4898 parser = xml.etree.ElementTree.XMLParser(target=target)
4899 parser.feed(xml.etree.ElementTree.tostring(node))
4900 return parser.close()
bf6427d2 4901
5b995f71
RA
4902 for k, v in LEGACY_NAMESPACES:
4903 for ns in v:
4904 dfxp_data = dfxp_data.replace(ns, k)
4905
3869028f 4906 dfxp = compat_etree_fromstring(dfxp_data)
bf6427d2 4907 out = []
5b995f71 4908 paras = dfxp.findall(_x('.//ttml:p')) or dfxp.findall('.//p')
1b0427e6
YCH
4909
4910 if not paras:
4911 raise ValueError('Invalid dfxp/TTML subtitle')
bf6427d2 4912
5b995f71
RA
4913 repeat = False
4914 while True:
4915 for style in dfxp.findall(_x('.//ttml:style')):
261f4730
RA
4916 style_id = style.get('id') or style.get(_x('xml:id'))
4917 if not style_id:
4918 continue
5b995f71
RA
4919 parent_style_id = style.get('style')
4920 if parent_style_id:
4921 if parent_style_id not in styles:
4922 repeat = True
4923 continue
4924 styles[style_id] = styles[parent_style_id].copy()
4925 for prop in SUPPORTED_STYLING:
4926 prop_val = style.get(_x('tts:' + prop))
4927 if prop_val:
4928 styles.setdefault(style_id, {})[prop] = prop_val
4929 if repeat:
4930 repeat = False
4931 else:
4932 break
4933
4934 for p in ('body', 'div'):
4935 ele = xpath_element(dfxp, [_x('.//ttml:' + p), './/' + p])
4936 if ele is None:
4937 continue
4938 style = styles.get(ele.get('style'))
4939 if not style:
4940 continue
4941 default_style.update(style)
4942
bf6427d2 4943 for para, index in zip(paras, itertools.count(1)):
d631d5f9 4944 begin_time = parse_dfxp_time_expr(para.attrib.get('begin'))
7dff0363 4945 end_time = parse_dfxp_time_expr(para.attrib.get('end'))
d631d5f9
YCH
4946 dur = parse_dfxp_time_expr(para.attrib.get('dur'))
4947 if begin_time is None:
4948 continue
7dff0363 4949 if not end_time:
d631d5f9
YCH
4950 if not dur:
4951 continue
4952 end_time = begin_time + dur
bf6427d2
YCH
4953 out.append('%d\n%s --> %s\n%s\n\n' % (
4954 index,
c1c924ab
YCH
4955 srt_subtitles_timecode(begin_time),
4956 srt_subtitles_timecode(end_time),
bf6427d2
YCH
4957 parse_node(para)))
4958
4959 return ''.join(out)
4960
4961
66e289ba
S
4962def cli_option(params, command_option, param):
4963 param = params.get(param)
98e698f1
RA
4964 if param:
4965 param = compat_str(param)
66e289ba
S
4966 return [command_option, param] if param is not None else []
4967
4968
4969def cli_bool_option(params, command_option, param, true_value='true', false_value='false', separator=None):
4970 param = params.get(param)
5b232f46
S
4971 if param is None:
4972 return []
66e289ba
S
4973 assert isinstance(param, bool)
4974 if separator:
4975 return [command_option + separator + (true_value if param else false_value)]
4976 return [command_option, true_value if param else false_value]
4977
4978
4979def cli_valueless_option(params, command_option, param, expected_value=True):
4980 param = params.get(param)
4981 return [command_option] if param == expected_value else []
4982
4983
e92caff5 4984def cli_configuration_args(argdict, keys, default=[], use_compat=True):
eab9b2bc 4985 if isinstance(argdict, (list, tuple)): # for backward compatibility
e92caff5 4986 if use_compat:
5b1ecbb3 4987 return argdict
4988 else:
4989 argdict = None
eab9b2bc 4990 if argdict is None:
5b1ecbb3 4991 return default
eab9b2bc 4992 assert isinstance(argdict, dict)
4993
e92caff5 4994 assert isinstance(keys, (list, tuple))
4995 for key_list in keys:
e92caff5 4996 arg_list = list(filter(
4997 lambda x: x is not None,
6606817a 4998 [argdict.get(key.lower()) for key in variadic(key_list)]))
e92caff5 4999 if arg_list:
5000 return [arg for args in arg_list for arg in args]
5001 return default
66e289ba 5002
6251555f 5003
330690a2 5004def _configuration_args(main_key, argdict, exe, keys=None, default=[], use_compat=True):
5005 main_key, exe = main_key.lower(), exe.lower()
5006 root_key = exe if main_key == exe else f'{main_key}+{exe}'
5007 keys = [f'{root_key}{k}' for k in (keys or [''])]
5008 if root_key in keys:
5009 if main_key != exe:
5010 keys.append((main_key, exe))
5011 keys.append('default')
5012 else:
5013 use_compat = False
5014 return cli_configuration_args(argdict, keys, default, use_compat)
5015
66e289ba 5016
39672624
YCH
5017class ISO639Utils(object):
5018 # See http://www.loc.gov/standards/iso639-2/ISO-639-2_utf-8.txt
5019 _lang_map = {
5020 'aa': 'aar',
5021 'ab': 'abk',
5022 'ae': 'ave',
5023 'af': 'afr',
5024 'ak': 'aka',
5025 'am': 'amh',
5026 'an': 'arg',
5027 'ar': 'ara',
5028 'as': 'asm',
5029 'av': 'ava',
5030 'ay': 'aym',
5031 'az': 'aze',
5032 'ba': 'bak',
5033 'be': 'bel',
5034 'bg': 'bul',
5035 'bh': 'bih',
5036 'bi': 'bis',
5037 'bm': 'bam',
5038 'bn': 'ben',
5039 'bo': 'bod',
5040 'br': 'bre',
5041 'bs': 'bos',
5042 'ca': 'cat',
5043 'ce': 'che',
5044 'ch': 'cha',
5045 'co': 'cos',
5046 'cr': 'cre',
5047 'cs': 'ces',
5048 'cu': 'chu',
5049 'cv': 'chv',
5050 'cy': 'cym',
5051 'da': 'dan',
5052 'de': 'deu',
5053 'dv': 'div',
5054 'dz': 'dzo',
5055 'ee': 'ewe',
5056 'el': 'ell',
5057 'en': 'eng',
5058 'eo': 'epo',
5059 'es': 'spa',
5060 'et': 'est',
5061 'eu': 'eus',
5062 'fa': 'fas',
5063 'ff': 'ful',
5064 'fi': 'fin',
5065 'fj': 'fij',
5066 'fo': 'fao',
5067 'fr': 'fra',
5068 'fy': 'fry',
5069 'ga': 'gle',
5070 'gd': 'gla',
5071 'gl': 'glg',
5072 'gn': 'grn',
5073 'gu': 'guj',
5074 'gv': 'glv',
5075 'ha': 'hau',
5076 'he': 'heb',
b7acc835 5077 'iw': 'heb', # Replaced by he in 1989 revision
39672624
YCH
5078 'hi': 'hin',
5079 'ho': 'hmo',
5080 'hr': 'hrv',
5081 'ht': 'hat',
5082 'hu': 'hun',
5083 'hy': 'hye',
5084 'hz': 'her',
5085 'ia': 'ina',
5086 'id': 'ind',
b7acc835 5087 'in': 'ind', # Replaced by id in 1989 revision
39672624
YCH
5088 'ie': 'ile',
5089 'ig': 'ibo',
5090 'ii': 'iii',
5091 'ik': 'ipk',
5092 'io': 'ido',
5093 'is': 'isl',
5094 'it': 'ita',
5095 'iu': 'iku',
5096 'ja': 'jpn',
5097 'jv': 'jav',
5098 'ka': 'kat',
5099 'kg': 'kon',
5100 'ki': 'kik',
5101 'kj': 'kua',
5102 'kk': 'kaz',
5103 'kl': 'kal',
5104 'km': 'khm',
5105 'kn': 'kan',
5106 'ko': 'kor',
5107 'kr': 'kau',
5108 'ks': 'kas',
5109 'ku': 'kur',
5110 'kv': 'kom',
5111 'kw': 'cor',
5112 'ky': 'kir',
5113 'la': 'lat',
5114 'lb': 'ltz',
5115 'lg': 'lug',
5116 'li': 'lim',
5117 'ln': 'lin',
5118 'lo': 'lao',
5119 'lt': 'lit',
5120 'lu': 'lub',
5121 'lv': 'lav',
5122 'mg': 'mlg',
5123 'mh': 'mah',
5124 'mi': 'mri',
5125 'mk': 'mkd',
5126 'ml': 'mal',
5127 'mn': 'mon',
5128 'mr': 'mar',
5129 'ms': 'msa',
5130 'mt': 'mlt',
5131 'my': 'mya',
5132 'na': 'nau',
5133 'nb': 'nob',
5134 'nd': 'nde',
5135 'ne': 'nep',
5136 'ng': 'ndo',
5137 'nl': 'nld',
5138 'nn': 'nno',
5139 'no': 'nor',
5140 'nr': 'nbl',
5141 'nv': 'nav',
5142 'ny': 'nya',
5143 'oc': 'oci',
5144 'oj': 'oji',
5145 'om': 'orm',
5146 'or': 'ori',
5147 'os': 'oss',
5148 'pa': 'pan',
5149 'pi': 'pli',
5150 'pl': 'pol',
5151 'ps': 'pus',
5152 'pt': 'por',
5153 'qu': 'que',
5154 'rm': 'roh',
5155 'rn': 'run',
5156 'ro': 'ron',
5157 'ru': 'rus',
5158 'rw': 'kin',
5159 'sa': 'san',
5160 'sc': 'srd',
5161 'sd': 'snd',
5162 'se': 'sme',
5163 'sg': 'sag',
5164 'si': 'sin',
5165 'sk': 'slk',
5166 'sl': 'slv',
5167 'sm': 'smo',
5168 'sn': 'sna',
5169 'so': 'som',
5170 'sq': 'sqi',
5171 'sr': 'srp',
5172 'ss': 'ssw',
5173 'st': 'sot',
5174 'su': 'sun',
5175 'sv': 'swe',
5176 'sw': 'swa',
5177 'ta': 'tam',
5178 'te': 'tel',
5179 'tg': 'tgk',
5180 'th': 'tha',
5181 'ti': 'tir',
5182 'tk': 'tuk',
5183 'tl': 'tgl',
5184 'tn': 'tsn',
5185 'to': 'ton',
5186 'tr': 'tur',
5187 'ts': 'tso',
5188 'tt': 'tat',
5189 'tw': 'twi',
5190 'ty': 'tah',
5191 'ug': 'uig',
5192 'uk': 'ukr',
5193 'ur': 'urd',
5194 'uz': 'uzb',
5195 've': 'ven',
5196 'vi': 'vie',
5197 'vo': 'vol',
5198 'wa': 'wln',
5199 'wo': 'wol',
5200 'xh': 'xho',
5201 'yi': 'yid',
e9a50fba 5202 'ji': 'yid', # Replaced by yi in 1989 revision
39672624
YCH
5203 'yo': 'yor',
5204 'za': 'zha',
5205 'zh': 'zho',
5206 'zu': 'zul',
5207 }
5208
5209 @classmethod
5210 def short2long(cls, code):
5211 """Convert language code from ISO 639-1 to ISO 639-2/T"""
5212 return cls._lang_map.get(code[:2])
5213
5214 @classmethod
5215 def long2short(cls, code):
5216 """Convert language code from ISO 639-2/T to ISO 639-1"""
5217 for short_name, long_name in cls._lang_map.items():
5218 if long_name == code:
5219 return short_name
5220
5221
4eb10f66
YCH
5222class ISO3166Utils(object):
5223 # From http://data.okfn.org/data/core/country-list
5224 _country_map = {
5225 'AF': 'Afghanistan',
5226 'AX': 'Åland Islands',
5227 'AL': 'Albania',
5228 'DZ': 'Algeria',
5229 'AS': 'American Samoa',
5230 'AD': 'Andorra',
5231 'AO': 'Angola',
5232 'AI': 'Anguilla',
5233 'AQ': 'Antarctica',
5234 'AG': 'Antigua and Barbuda',
5235 'AR': 'Argentina',
5236 'AM': 'Armenia',
5237 'AW': 'Aruba',
5238 'AU': 'Australia',
5239 'AT': 'Austria',
5240 'AZ': 'Azerbaijan',
5241 'BS': 'Bahamas',
5242 'BH': 'Bahrain',
5243 'BD': 'Bangladesh',
5244 'BB': 'Barbados',
5245 'BY': 'Belarus',
5246 'BE': 'Belgium',
5247 'BZ': 'Belize',
5248 'BJ': 'Benin',
5249 'BM': 'Bermuda',
5250 'BT': 'Bhutan',
5251 'BO': 'Bolivia, Plurinational State of',
5252 'BQ': 'Bonaire, Sint Eustatius and Saba',
5253 'BA': 'Bosnia and Herzegovina',
5254 'BW': 'Botswana',
5255 'BV': 'Bouvet Island',
5256 'BR': 'Brazil',
5257 'IO': 'British Indian Ocean Territory',
5258 'BN': 'Brunei Darussalam',
5259 'BG': 'Bulgaria',
5260 'BF': 'Burkina Faso',
5261 'BI': 'Burundi',
5262 'KH': 'Cambodia',
5263 'CM': 'Cameroon',
5264 'CA': 'Canada',
5265 'CV': 'Cape Verde',
5266 'KY': 'Cayman Islands',
5267 'CF': 'Central African Republic',
5268 'TD': 'Chad',
5269 'CL': 'Chile',
5270 'CN': 'China',
5271 'CX': 'Christmas Island',
5272 'CC': 'Cocos (Keeling) Islands',
5273 'CO': 'Colombia',
5274 'KM': 'Comoros',
5275 'CG': 'Congo',
5276 'CD': 'Congo, the Democratic Republic of the',
5277 'CK': 'Cook Islands',
5278 'CR': 'Costa Rica',
5279 'CI': 'Côte d\'Ivoire',
5280 'HR': 'Croatia',
5281 'CU': 'Cuba',
5282 'CW': 'Curaçao',
5283 'CY': 'Cyprus',
5284 'CZ': 'Czech Republic',
5285 'DK': 'Denmark',
5286 'DJ': 'Djibouti',
5287 'DM': 'Dominica',
5288 'DO': 'Dominican Republic',
5289 'EC': 'Ecuador',
5290 'EG': 'Egypt',
5291 'SV': 'El Salvador',
5292 'GQ': 'Equatorial Guinea',
5293 'ER': 'Eritrea',
5294 'EE': 'Estonia',
5295 'ET': 'Ethiopia',
5296 'FK': 'Falkland Islands (Malvinas)',
5297 'FO': 'Faroe Islands',
5298 'FJ': 'Fiji',
5299 'FI': 'Finland',
5300 'FR': 'France',
5301 'GF': 'French Guiana',
5302 'PF': 'French Polynesia',
5303 'TF': 'French Southern Territories',
5304 'GA': 'Gabon',
5305 'GM': 'Gambia',
5306 'GE': 'Georgia',
5307 'DE': 'Germany',
5308 'GH': 'Ghana',
5309 'GI': 'Gibraltar',
5310 'GR': 'Greece',
5311 'GL': 'Greenland',
5312 'GD': 'Grenada',
5313 'GP': 'Guadeloupe',
5314 'GU': 'Guam',
5315 'GT': 'Guatemala',
5316 'GG': 'Guernsey',
5317 'GN': 'Guinea',
5318 'GW': 'Guinea-Bissau',
5319 'GY': 'Guyana',
5320 'HT': 'Haiti',
5321 'HM': 'Heard Island and McDonald Islands',
5322 'VA': 'Holy See (Vatican City State)',
5323 'HN': 'Honduras',
5324 'HK': 'Hong Kong',
5325 'HU': 'Hungary',
5326 'IS': 'Iceland',
5327 'IN': 'India',
5328 'ID': 'Indonesia',
5329 'IR': 'Iran, Islamic Republic of',
5330 'IQ': 'Iraq',
5331 'IE': 'Ireland',
5332 'IM': 'Isle of Man',
5333 'IL': 'Israel',
5334 'IT': 'Italy',
5335 'JM': 'Jamaica',
5336 'JP': 'Japan',
5337 'JE': 'Jersey',
5338 'JO': 'Jordan',
5339 'KZ': 'Kazakhstan',
5340 'KE': 'Kenya',
5341 'KI': 'Kiribati',
5342 'KP': 'Korea, Democratic People\'s Republic of',
5343 'KR': 'Korea, Republic of',
5344 'KW': 'Kuwait',
5345 'KG': 'Kyrgyzstan',
5346 'LA': 'Lao People\'s Democratic Republic',
5347 'LV': 'Latvia',
5348 'LB': 'Lebanon',
5349 'LS': 'Lesotho',
5350 'LR': 'Liberia',
5351 'LY': 'Libya',
5352 'LI': 'Liechtenstein',
5353 'LT': 'Lithuania',
5354 'LU': 'Luxembourg',
5355 'MO': 'Macao',
5356 'MK': 'Macedonia, the Former Yugoslav Republic of',
5357 'MG': 'Madagascar',
5358 'MW': 'Malawi',
5359 'MY': 'Malaysia',
5360 'MV': 'Maldives',
5361 'ML': 'Mali',
5362 'MT': 'Malta',
5363 'MH': 'Marshall Islands',
5364 'MQ': 'Martinique',
5365 'MR': 'Mauritania',
5366 'MU': 'Mauritius',
5367 'YT': 'Mayotte',
5368 'MX': 'Mexico',
5369 'FM': 'Micronesia, Federated States of',
5370 'MD': 'Moldova, Republic of',
5371 'MC': 'Monaco',
5372 'MN': 'Mongolia',
5373 'ME': 'Montenegro',
5374 'MS': 'Montserrat',
5375 'MA': 'Morocco',
5376 'MZ': 'Mozambique',
5377 'MM': 'Myanmar',
5378 'NA': 'Namibia',
5379 'NR': 'Nauru',
5380 'NP': 'Nepal',
5381 'NL': 'Netherlands',
5382 'NC': 'New Caledonia',
5383 'NZ': 'New Zealand',
5384 'NI': 'Nicaragua',
5385 'NE': 'Niger',
5386 'NG': 'Nigeria',
5387 'NU': 'Niue',
5388 'NF': 'Norfolk Island',
5389 'MP': 'Northern Mariana Islands',
5390 'NO': 'Norway',
5391 'OM': 'Oman',
5392 'PK': 'Pakistan',
5393 'PW': 'Palau',
5394 'PS': 'Palestine, State of',
5395 'PA': 'Panama',
5396 'PG': 'Papua New Guinea',
5397 'PY': 'Paraguay',
5398 'PE': 'Peru',
5399 'PH': 'Philippines',
5400 'PN': 'Pitcairn',
5401 'PL': 'Poland',
5402 'PT': 'Portugal',
5403 'PR': 'Puerto Rico',
5404 'QA': 'Qatar',
5405 'RE': 'Réunion',
5406 'RO': 'Romania',
5407 'RU': 'Russian Federation',
5408 'RW': 'Rwanda',
5409 'BL': 'Saint Barthélemy',
5410 'SH': 'Saint Helena, Ascension and Tristan da Cunha',
5411 'KN': 'Saint Kitts and Nevis',
5412 'LC': 'Saint Lucia',
5413 'MF': 'Saint Martin (French part)',
5414 'PM': 'Saint Pierre and Miquelon',
5415 'VC': 'Saint Vincent and the Grenadines',
5416 'WS': 'Samoa',
5417 'SM': 'San Marino',
5418 'ST': 'Sao Tome and Principe',
5419 'SA': 'Saudi Arabia',
5420 'SN': 'Senegal',
5421 'RS': 'Serbia',
5422 'SC': 'Seychelles',
5423 'SL': 'Sierra Leone',
5424 'SG': 'Singapore',
5425 'SX': 'Sint Maarten (Dutch part)',
5426 'SK': 'Slovakia',
5427 'SI': 'Slovenia',
5428 'SB': 'Solomon Islands',
5429 'SO': 'Somalia',
5430 'ZA': 'South Africa',
5431 'GS': 'South Georgia and the South Sandwich Islands',
5432 'SS': 'South Sudan',
5433 'ES': 'Spain',
5434 'LK': 'Sri Lanka',
5435 'SD': 'Sudan',
5436 'SR': 'Suriname',
5437 'SJ': 'Svalbard and Jan Mayen',
5438 'SZ': 'Swaziland',
5439 'SE': 'Sweden',
5440 'CH': 'Switzerland',
5441 'SY': 'Syrian Arab Republic',
5442 'TW': 'Taiwan, Province of China',
5443 'TJ': 'Tajikistan',
5444 'TZ': 'Tanzania, United Republic of',
5445 'TH': 'Thailand',
5446 'TL': 'Timor-Leste',
5447 'TG': 'Togo',
5448 'TK': 'Tokelau',
5449 'TO': 'Tonga',
5450 'TT': 'Trinidad and Tobago',
5451 'TN': 'Tunisia',
5452 'TR': 'Turkey',
5453 'TM': 'Turkmenistan',
5454 'TC': 'Turks and Caicos Islands',
5455 'TV': 'Tuvalu',
5456 'UG': 'Uganda',
5457 'UA': 'Ukraine',
5458 'AE': 'United Arab Emirates',
5459 'GB': 'United Kingdom',
5460 'US': 'United States',
5461 'UM': 'United States Minor Outlying Islands',
5462 'UY': 'Uruguay',
5463 'UZ': 'Uzbekistan',
5464 'VU': 'Vanuatu',
5465 'VE': 'Venezuela, Bolivarian Republic of',
5466 'VN': 'Viet Nam',
5467 'VG': 'Virgin Islands, British',
5468 'VI': 'Virgin Islands, U.S.',
5469 'WF': 'Wallis and Futuna',
5470 'EH': 'Western Sahara',
5471 'YE': 'Yemen',
5472 'ZM': 'Zambia',
5473 'ZW': 'Zimbabwe',
5474 }
5475
5476 @classmethod
5477 def short2full(cls, code):
5478 """Convert an ISO 3166-2 country code to the corresponding full name"""
5479 return cls._country_map.get(code.upper())
5480
5481
773f291d
S
5482class GeoUtils(object):
5483 # Major IPv4 address blocks per country
5484 _country_ip_map = {
53896ca5 5485 'AD': '46.172.224.0/19',
773f291d
S
5486 'AE': '94.200.0.0/13',
5487 'AF': '149.54.0.0/17',
5488 'AG': '209.59.64.0/18',
5489 'AI': '204.14.248.0/21',
5490 'AL': '46.99.0.0/16',
5491 'AM': '46.70.0.0/15',
5492 'AO': '105.168.0.0/13',
53896ca5
S
5493 'AP': '182.50.184.0/21',
5494 'AQ': '23.154.160.0/24',
773f291d
S
5495 'AR': '181.0.0.0/12',
5496 'AS': '202.70.112.0/20',
53896ca5 5497 'AT': '77.116.0.0/14',
773f291d
S
5498 'AU': '1.128.0.0/11',
5499 'AW': '181.41.0.0/18',
53896ca5
S
5500 'AX': '185.217.4.0/22',
5501 'AZ': '5.197.0.0/16',
773f291d
S
5502 'BA': '31.176.128.0/17',
5503 'BB': '65.48.128.0/17',
5504 'BD': '114.130.0.0/16',
5505 'BE': '57.0.0.0/8',
53896ca5 5506 'BF': '102.178.0.0/15',
773f291d
S
5507 'BG': '95.42.0.0/15',
5508 'BH': '37.131.0.0/17',
5509 'BI': '154.117.192.0/18',
5510 'BJ': '137.255.0.0/16',
53896ca5 5511 'BL': '185.212.72.0/23',
773f291d
S
5512 'BM': '196.12.64.0/18',
5513 'BN': '156.31.0.0/16',
5514 'BO': '161.56.0.0/16',
5515 'BQ': '161.0.80.0/20',
53896ca5 5516 'BR': '191.128.0.0/12',
773f291d
S
5517 'BS': '24.51.64.0/18',
5518 'BT': '119.2.96.0/19',
5519 'BW': '168.167.0.0/16',
5520 'BY': '178.120.0.0/13',
5521 'BZ': '179.42.192.0/18',
5522 'CA': '99.224.0.0/11',
5523 'CD': '41.243.0.0/16',
53896ca5
S
5524 'CF': '197.242.176.0/21',
5525 'CG': '160.113.0.0/16',
773f291d 5526 'CH': '85.0.0.0/13',
53896ca5 5527 'CI': '102.136.0.0/14',
773f291d
S
5528 'CK': '202.65.32.0/19',
5529 'CL': '152.172.0.0/14',
53896ca5 5530 'CM': '102.244.0.0/14',
773f291d
S
5531 'CN': '36.128.0.0/10',
5532 'CO': '181.240.0.0/12',
5533 'CR': '201.192.0.0/12',
5534 'CU': '152.206.0.0/15',
5535 'CV': '165.90.96.0/19',
5536 'CW': '190.88.128.0/17',
53896ca5 5537 'CY': '31.153.0.0/16',
773f291d
S
5538 'CZ': '88.100.0.0/14',
5539 'DE': '53.0.0.0/8',
5540 'DJ': '197.241.0.0/17',
5541 'DK': '87.48.0.0/12',
5542 'DM': '192.243.48.0/20',
5543 'DO': '152.166.0.0/15',
5544 'DZ': '41.96.0.0/12',
5545 'EC': '186.68.0.0/15',
5546 'EE': '90.190.0.0/15',
5547 'EG': '156.160.0.0/11',
5548 'ER': '196.200.96.0/20',
5549 'ES': '88.0.0.0/11',
5550 'ET': '196.188.0.0/14',
5551 'EU': '2.16.0.0/13',
5552 'FI': '91.152.0.0/13',
5553 'FJ': '144.120.0.0/16',
53896ca5 5554 'FK': '80.73.208.0/21',
773f291d
S
5555 'FM': '119.252.112.0/20',
5556 'FO': '88.85.32.0/19',
5557 'FR': '90.0.0.0/9',
5558 'GA': '41.158.0.0/15',
5559 'GB': '25.0.0.0/8',
5560 'GD': '74.122.88.0/21',
5561 'GE': '31.146.0.0/16',
5562 'GF': '161.22.64.0/18',
5563 'GG': '62.68.160.0/19',
53896ca5
S
5564 'GH': '154.160.0.0/12',
5565 'GI': '95.164.0.0/16',
773f291d
S
5566 'GL': '88.83.0.0/19',
5567 'GM': '160.182.0.0/15',
5568 'GN': '197.149.192.0/18',
5569 'GP': '104.250.0.0/19',
5570 'GQ': '105.235.224.0/20',
5571 'GR': '94.64.0.0/13',
5572 'GT': '168.234.0.0/16',
5573 'GU': '168.123.0.0/16',
5574 'GW': '197.214.80.0/20',
5575 'GY': '181.41.64.0/18',
5576 'HK': '113.252.0.0/14',
5577 'HN': '181.210.0.0/16',
5578 'HR': '93.136.0.0/13',
5579 'HT': '148.102.128.0/17',
5580 'HU': '84.0.0.0/14',
5581 'ID': '39.192.0.0/10',
5582 'IE': '87.32.0.0/12',
5583 'IL': '79.176.0.0/13',
5584 'IM': '5.62.80.0/20',
5585 'IN': '117.192.0.0/10',
5586 'IO': '203.83.48.0/21',
5587 'IQ': '37.236.0.0/14',
5588 'IR': '2.176.0.0/12',
5589 'IS': '82.221.0.0/16',
5590 'IT': '79.0.0.0/10',
5591 'JE': '87.244.64.0/18',
5592 'JM': '72.27.0.0/17',
5593 'JO': '176.29.0.0/16',
53896ca5 5594 'JP': '133.0.0.0/8',
773f291d
S
5595 'KE': '105.48.0.0/12',
5596 'KG': '158.181.128.0/17',
5597 'KH': '36.37.128.0/17',
5598 'KI': '103.25.140.0/22',
5599 'KM': '197.255.224.0/20',
53896ca5 5600 'KN': '198.167.192.0/19',
773f291d
S
5601 'KP': '175.45.176.0/22',
5602 'KR': '175.192.0.0/10',
5603 'KW': '37.36.0.0/14',
5604 'KY': '64.96.0.0/15',
5605 'KZ': '2.72.0.0/13',
5606 'LA': '115.84.64.0/18',
5607 'LB': '178.135.0.0/16',
53896ca5 5608 'LC': '24.92.144.0/20',
773f291d
S
5609 'LI': '82.117.0.0/19',
5610 'LK': '112.134.0.0/15',
53896ca5 5611 'LR': '102.183.0.0/16',
773f291d
S
5612 'LS': '129.232.0.0/17',
5613 'LT': '78.56.0.0/13',
5614 'LU': '188.42.0.0/16',
5615 'LV': '46.109.0.0/16',
5616 'LY': '41.252.0.0/14',
5617 'MA': '105.128.0.0/11',
5618 'MC': '88.209.64.0/18',
5619 'MD': '37.246.0.0/16',
5620 'ME': '178.175.0.0/17',
5621 'MF': '74.112.232.0/21',
5622 'MG': '154.126.0.0/17',
5623 'MH': '117.103.88.0/21',
5624 'MK': '77.28.0.0/15',
5625 'ML': '154.118.128.0/18',
5626 'MM': '37.111.0.0/17',
5627 'MN': '49.0.128.0/17',
5628 'MO': '60.246.0.0/16',
5629 'MP': '202.88.64.0/20',
5630 'MQ': '109.203.224.0/19',
5631 'MR': '41.188.64.0/18',
5632 'MS': '208.90.112.0/22',
5633 'MT': '46.11.0.0/16',
5634 'MU': '105.16.0.0/12',
5635 'MV': '27.114.128.0/18',
53896ca5 5636 'MW': '102.70.0.0/15',
773f291d
S
5637 'MX': '187.192.0.0/11',
5638 'MY': '175.136.0.0/13',
5639 'MZ': '197.218.0.0/15',
5640 'NA': '41.182.0.0/16',
5641 'NC': '101.101.0.0/18',
5642 'NE': '197.214.0.0/18',
5643 'NF': '203.17.240.0/22',
5644 'NG': '105.112.0.0/12',
5645 'NI': '186.76.0.0/15',
5646 'NL': '145.96.0.0/11',
5647 'NO': '84.208.0.0/13',
5648 'NP': '36.252.0.0/15',
5649 'NR': '203.98.224.0/19',
5650 'NU': '49.156.48.0/22',
5651 'NZ': '49.224.0.0/14',
5652 'OM': '5.36.0.0/15',
5653 'PA': '186.72.0.0/15',
5654 'PE': '186.160.0.0/14',
5655 'PF': '123.50.64.0/18',
5656 'PG': '124.240.192.0/19',
5657 'PH': '49.144.0.0/13',
5658 'PK': '39.32.0.0/11',
5659 'PL': '83.0.0.0/11',
5660 'PM': '70.36.0.0/20',
5661 'PR': '66.50.0.0/16',
5662 'PS': '188.161.0.0/16',
5663 'PT': '85.240.0.0/13',
5664 'PW': '202.124.224.0/20',
5665 'PY': '181.120.0.0/14',
5666 'QA': '37.210.0.0/15',
53896ca5 5667 'RE': '102.35.0.0/16',
773f291d 5668 'RO': '79.112.0.0/13',
53896ca5 5669 'RS': '93.86.0.0/15',
773f291d 5670 'RU': '5.136.0.0/13',
53896ca5 5671 'RW': '41.186.0.0/16',
773f291d
S
5672 'SA': '188.48.0.0/13',
5673 'SB': '202.1.160.0/19',
5674 'SC': '154.192.0.0/11',
53896ca5 5675 'SD': '102.120.0.0/13',
773f291d 5676 'SE': '78.64.0.0/12',
53896ca5 5677 'SG': '8.128.0.0/10',
773f291d
S
5678 'SI': '188.196.0.0/14',
5679 'SK': '78.98.0.0/15',
53896ca5 5680 'SL': '102.143.0.0/17',
773f291d
S
5681 'SM': '89.186.32.0/19',
5682 'SN': '41.82.0.0/15',
53896ca5 5683 'SO': '154.115.192.0/18',
773f291d
S
5684 'SR': '186.179.128.0/17',
5685 'SS': '105.235.208.0/21',
5686 'ST': '197.159.160.0/19',
5687 'SV': '168.243.0.0/16',
5688 'SX': '190.102.0.0/20',
5689 'SY': '5.0.0.0/16',
5690 'SZ': '41.84.224.0/19',
5691 'TC': '65.255.48.0/20',
5692 'TD': '154.68.128.0/19',
5693 'TG': '196.168.0.0/14',
5694 'TH': '171.96.0.0/13',
5695 'TJ': '85.9.128.0/18',
5696 'TK': '27.96.24.0/21',
5697 'TL': '180.189.160.0/20',
5698 'TM': '95.85.96.0/19',
5699 'TN': '197.0.0.0/11',
5700 'TO': '175.176.144.0/21',
5701 'TR': '78.160.0.0/11',
5702 'TT': '186.44.0.0/15',
5703 'TV': '202.2.96.0/19',
5704 'TW': '120.96.0.0/11',
5705 'TZ': '156.156.0.0/14',
53896ca5
S
5706 'UA': '37.52.0.0/14',
5707 'UG': '102.80.0.0/13',
5708 'US': '6.0.0.0/8',
773f291d 5709 'UY': '167.56.0.0/13',
53896ca5 5710 'UZ': '84.54.64.0/18',
773f291d 5711 'VA': '212.77.0.0/19',
53896ca5 5712 'VC': '207.191.240.0/21',
773f291d 5713 'VE': '186.88.0.0/13',
53896ca5 5714 'VG': '66.81.192.0/20',
773f291d
S
5715 'VI': '146.226.0.0/16',
5716 'VN': '14.160.0.0/11',
5717 'VU': '202.80.32.0/20',
5718 'WF': '117.20.32.0/21',
5719 'WS': '202.4.32.0/19',
5720 'YE': '134.35.0.0/16',
5721 'YT': '41.242.116.0/22',
5722 'ZA': '41.0.0.0/11',
5723 'ZM': '102.144.0.0/13',
5724 'ZW': '102.177.192.0/18',
5725 }
5726
5727 @classmethod
5728 def random_ipv4(cls, code_or_block):
5729 if len(code_or_block) == 2:
5730 block = cls._country_ip_map.get(code_or_block.upper())
5731 if not block:
5732 return None
5733 else:
5734 block = code_or_block
5735 addr, preflen = block.split('/')
5736 addr_min = compat_struct_unpack('!L', socket.inet_aton(addr))[0]
5737 addr_max = addr_min | (0xffffffff >> int(preflen))
18a0defa 5738 return compat_str(socket.inet_ntoa(
4248dad9 5739 compat_struct_pack('!L', random.randint(addr_min, addr_max))))
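    # Worked example (illustrative): for the 'MS' block '208.90.112.0/22',
    # addr_min is 0xd05a7000 and addr_max is addr_min | (0xffffffff >> 22)
    # == 0xd05a73ff, so random_ipv4('MS') yields an address between
    # 208.90.112.0 and 208.90.115.255.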
5740
5741
91410c9b 5742class PerRequestProxyHandler(compat_urllib_request.ProxyHandler):
5743 def __init__(self, proxies=None):
5744 # Set default handlers
5745 for type in ('http', 'https'):
5746 setattr(self, '%s_open' % type,
5747 lambda r, proxy='__noproxy__', type=type, meth=self.proxy_open:
5748 meth(r, proxy, type))
38e87f6c 5749 compat_urllib_request.ProxyHandler.__init__(self, proxies)
2461f79d 5750
91410c9b 5751 def proxy_open(self, req, proxy, type):
2461f79d 5752 req_proxy = req.headers.get('Ytdl-request-proxy')
5753 if req_proxy is not None:
5754 proxy = req_proxy
5755 del req.headers['Ytdl-request-proxy']
5756
5757 if proxy == '__noproxy__':
5758 return None # No Proxy
51fb4995 5759 if compat_urlparse.urlparse(proxy).scheme.lower() in ('socks', 'socks4', 'socks4a', 'socks5'):
71aff188 5760 req.add_header('Ytdl-socks-proxy', proxy)
7a5c1cfe 5761 # yt-dlp's http/https handlers take care of wrapping the socket with SOCKS
71aff188 5762 return None
5763 return compat_urllib_request.ProxyHandler.proxy_open(
5764 self, req, proxy, type)
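# Usage sketch (illustrative): a per-request proxy is chosen by setting the
# 'Ytdl-request-proxy' header on the request before it is opened, e.g.
# req.add_header('Ytdl-request-proxy', 'socks5://127.0.0.1:9150').
# The special value '__noproxy__' bypasses any global proxy for that request,
# and socks* URLs are handed off via the 'Ytdl-socks-proxy' header instead.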
5765
5766
5767# Both long_to_bytes and bytes_to_long are adapted from PyCrypto, which is
5768# released into Public Domain
5769# https://github.com/dlitz/pycrypto/blob/master/lib/Crypto/Util/number.py#L387
5770
5771def long_to_bytes(n, blocksize=0):
5772 """long_to_bytes(n:long, blocksize:int) : string
5773 Convert a long integer to a byte string.
5774
5775 If optional blocksize is given and greater than zero, pad the front of the
5776 byte string with binary zeros so that the length is a multiple of
5777 blocksize.
5778 """
5779 # after much testing, this algorithm was deemed to be the fastest
5780 s = b''
5781 n = int(n)
5782 while n > 0:
5783 s = compat_struct_pack('>I', n & 0xffffffff) + s
5784 n = n >> 32
5785 # strip off leading zeros
5786 for i in range(len(s)):
5787 if s[i] != b'\000'[0]:
5788 break
5789 else:
5790 # only happens when n == 0
5791 s = b'\000'
5792 i = 0
5793 s = s[i:]
5794 # add back some pad bytes. this could be done more efficiently w.r.t. the
5795 # de-padding being done above, but sigh...
5796 if blocksize > 0 and len(s) % blocksize:
5797 s = (blocksize - len(s) % blocksize) * b'\000' + s
5798 return s
5799
5800
5801def bytes_to_long(s):
5802 """bytes_to_long(string) : long
5803 Convert a byte string to a long integer.
5804
5805 This is (essentially) the inverse of long_to_bytes().
5806 """
5807 acc = 0
5808 length = len(s)
5809 if length % 4:
5810 extra = (4 - length % 4)
5811 s = b'\000' * extra + s
5812 length = length + extra
5813 for i in range(0, length, 4):
5814 acc = (acc << 32) + compat_struct_unpack('>I', s[i:i + 4])[0]
5815 return acc
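# Round-trip example (illustrative): long_to_bytes(256) == b'\x01\x00',
# bytes_to_long(b'\x01\x00') == 256, and long_to_bytes(256, 4) is left-padded
# to b'\x00\x00\x01\x00'.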
5816
5817
5818def ohdave_rsa_encrypt(data, exponent, modulus):
5819 '''
5820 Implement OHDave's RSA algorithm. See http://www.ohdave.com/rsa/
5821
5822 Input:
5823 data: data to encrypt, bytes-like object
5824 exponent, modulus: parameter e and N of RSA algorithm, both integer
5825 Output: hex string of encrypted data
5826
5827 Limitation: supports one block encryption only
5828 '''
5829
5830 payload = int(binascii.hexlify(data[::-1]), 16)
5831 encrypted = pow(payload, exponent, modulus)
5832 return '%x' % encrypted
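# Example with deliberately tiny parameters (real sites use a full-size modulus):
# ohdave_rsa_encrypt(b'\x02', 3, 5) interprets the reversed bytes as the integer 2,
# computes pow(2, 3, 5) == 3 and returns '3'.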
5833
5834
5835def pkcs1pad(data, length):
5836 """
5837 Padding input data with PKCS#1 scheme
5838
5839 @param {int[]} data input data
5840 @param {int} length target length
5841 @returns {int[]} padded data
5842 """
5843 if len(data) > length - 11:
5844 raise ValueError('Input data too long for PKCS#1 padding')
5845
5846 pseudo_random = [random.randint(1, 255) for _ in range(length - len(data) - 3)]  # PKCS#1 v1.5 padding bytes must be non-zero
5847 return [0, 2] + pseudo_random + [0] + data
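# Example (illustrative): pkcs1pad([1, 2, 3], 16) returns a 16-item list of the
# form [0, 2, r1, ..., r10, 0, 1, 2, 3], i.e. the block-type-2 header, ten
# pseudo-random filler bytes and a zero separator followed by the data.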
5848
5849
5eb6bdce 5850def encode_base_n(num, n, table=None):
59f898b7 5851 FULL_TABLE = '0123456789abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ'
5852 if not table:
5853 table = FULL_TABLE[:n]
5854
5855 if n > len(table):
5856 raise ValueError('base %d exceeds table length %d' % (n, len(table)))
5857
5858 if num == 0:
5859 return table[0]
5860
5861 ret = ''
5862 while num:
5863 ret = table[num % n] + ret
5864 num = num // n
5865 return ret
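# Examples (illustrative): encode_base_n(123, 16) == '7b',
# encode_base_n(255, 2) == '11111111' and encode_base_n(0, 36) == '0'.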
5866
5867
5868def decode_packed_codes(code):
06b3fe29 5869 mobj = re.search(PACKED_CODES_RE, code)
a0566bbf 5870 obfuscated_code, base, count, symbols = mobj.groups()
5871 base = int(base)
5872 count = int(count)
5873 symbols = symbols.split('|')
5874 symbol_table = {}
5875
5876 while count:
5877 count -= 1
5eb6bdce 5878 base_n_count = encode_base_n(count, base)
5879 symbol_table[base_n_count] = symbols[count] or base_n_count
5880
5881 return re.sub(
5882 r'\b(\w+)\b', lambda mobj: symbol_table[mobj.group(0)],
a0566bbf 5883 obfuscated_code)
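# Note: this reverses Dean Edwards style "p,a,c,k,e,d" JavaScript packing; every
# \w+ token in the obfuscated code is looked up in the rebuilt base-N symbol
# table and replaced with its original identifier.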
e154c651 5884
5885
5886def caesar(s, alphabet, shift):
5887 if shift == 0:
5888 return s
5889 l = len(alphabet)
5890 return ''.join(
5891 alphabet[(alphabet.index(c) + shift) % l] if c in alphabet else c
5892 for c in s)
5893
5894
5895def rot47(s):
5896 return caesar(s, r'''!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~''', 47)
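# Examples (illustrative): caesar('hi', 'abcdefghijklmnopqrstuvwxyz', 1) == 'ij';
# rot47 shifts every printable ASCII character by 47 of 94 positions, so applying
# it twice restores the input: rot47(rot47('yt-dlp')) == 'yt-dlp'.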
5897
5898
e154c651 5899def parse_m3u8_attributes(attrib):
5900 info = {}
5901 for (key, val) in re.findall(r'(?P<key>[A-Z0-9-]+)=(?P<val>"[^"]+"|[^",]+)(?:,|$)', attrib):
5902 if val.startswith('"'):
5903 val = val[1:-1]
5904 info[key] = val
5905 return info
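# Example (illustrative):
# parse_m3u8_attributes('BANDWIDTH=1280000,CODECS="mp4a.40.2,avc1.4d401f"')
# == {'BANDWIDTH': '1280000', 'CODECS': 'mp4a.40.2,avc1.4d401f'}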
5906
5907
5908def urshift(val, n):
5909 return val >> n if val >= 0 else (val + 0x100000000) >> n
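# Example (illustrative): urshift(-1, 8) == 0xffffff, matching JavaScript's
# unsigned right shift (-1 >>> 8) on 32-bit values.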
5910
5911
5912# Based on png2str() written by @gdkchan and improved by @yokrysty
067aa17e 5913# Originally posted at https://github.com/ytdl-org/youtube-dl/issues/9706
5914def decode_png(png_data):
5915 # Reference: https://www.w3.org/TR/PNG/
5916 header = png_data[8:]
5917
5918 if png_data[:8] != b'\x89PNG\x0d\x0a\x1a\x0a' or header[4:8] != b'IHDR':
5919 raise IOError('Not a valid PNG file.')
5920
5921 int_map = {1: '>B', 2: '>H', 4: '>I'}
5922 unpack_integer = lambda x: compat_struct_unpack(int_map[len(x)], x)[0]
5923
5924 chunks = []
5925
5926 while header:
5927 length = unpack_integer(header[:4])
5928 header = header[4:]
5929
5930 chunk_type = header[:4]
5931 header = header[4:]
5932
5933 chunk_data = header[:length]
5934 header = header[length:]
5935
5936 header = header[4:] # Skip CRC
5937
5938 chunks.append({
5939 'type': chunk_type,
5940 'length': length,
5941 'data': chunk_data
5942 })
5943
5944 ihdr = chunks[0]['data']
5945
5946 width = unpack_integer(ihdr[:4])
5947 height = unpack_integer(ihdr[4:8])
5948
5949 idat = b''
5950
5951 for chunk in chunks:
5952 if chunk['type'] == b'IDAT':
5953 idat += chunk['data']
5954
5955 if not idat:
5956 raise IOError('Unable to read PNG data.')
5957
5958 decompressed_data = bytearray(zlib.decompress(idat))
5959
5960 stride = width * 3
5961 pixels = []
5962
5963 def _get_pixel(idx):
5964 x = idx % stride
5965 y = idx // stride
5966 return pixels[y][x]
5967
5968 for y in range(height):
5969 basePos = y * (1 + stride)
5970 filter_type = decompressed_data[basePos]
5971
5972 current_row = []
5973
5974 pixels.append(current_row)
5975
5976 for x in range(stride):
5977 color = decompressed_data[1 + basePos + x]
5978 basex = y * stride + x
5979 left = 0
5980 up = 0
5981
5982 if x > 2:
5983 left = _get_pixel(basex - 3)
5984 if y > 0:
5985 up = _get_pixel(basex - stride)
5986
5987 if filter_type == 1: # Sub
5988 color = (color + left) & 0xff
5989 elif filter_type == 2: # Up
5990 color = (color + up) & 0xff
5991 elif filter_type == 3: # Average
5992 color = (color + ((left + up) >> 1)) & 0xff
5993 elif filter_type == 4: # Paeth
5994 a = left
5995 b = up
5996 c = 0
5997
5998 if x > 2 and y > 0:
5999 c = _get_pixel(basex - stride - 3)
6000
6001 p = a + b - c
6002
6003 pa = abs(p - a)
6004 pb = abs(p - b)
6005 pc = abs(p - c)
6006
6007 if pa <= pb and pa <= pc:
6008 color = (color + a) & 0xff
6009 elif pb <= pc:
6010 color = (color + b) & 0xff
6011 else:
6012 color = (color + c) & 0xff
6013
6014 current_row.append(color)
6015
6016 return width, height, pixels
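# Usage sketch (illustrative): for the 8-bit RGB layout this decoder assumes,
# each row of `pixels` is a flat list of width * 3 byte values, so the colour of
# pixel (x, y) is pixels[y][x * 3:x * 3 + 3].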
6017
6018
6019def write_xattr(path, key, value):
6020 # This mess below finds the best xattr tool for the job
6021 try:
6022 # try the pyxattr module...
6023 import xattr
6024
6025 if hasattr(xattr, 'set'): # pyxattr
6026 # Unicode arguments are not supported in python-pyxattr until
6027 # version 0.5.0
067aa17e 6028 # See https://github.com/ytdl-org/youtube-dl/issues/5498
6029 pyxattr_required_version = '0.5.0'
6030 if version_tuple(xattr.__version__) < version_tuple(pyxattr_required_version):
6031 # TODO: fallback to CLI tools
6032 raise XAttrUnavailableError(
6033 'python-pyxattr is detected but is too old. '
7a5c1cfe 6034 'yt-dlp requires %s or above while your version is %s. '
6035 'Falling back to other xattr implementations' % (
6036 pyxattr_required_version, xattr.__version__))
6037
6038 setxattr = xattr.set
6039 else: # xattr
6040 setxattr = xattr.setxattr
6041
6042 try:
53a7e3d2 6043 setxattr(path, key, value)
6044 except EnvironmentError as e:
6045 raise XAttrMetadataError(e.errno, e.strerror)
6046
6047 except ImportError:
6048 if compat_os_name == 'nt':
6049 # Write xattrs to NTFS Alternate Data Streams:
6050 # http://en.wikipedia.org/wiki/NTFS#Alternate_data_streams_.28ADS.29
6051 assert ':' not in key
6052 assert os.path.exists(path)
6053
6054 ads_fn = path + ':' + key
6055 try:
6056 with open(ads_fn, 'wb') as f:
6057 f.write(value)
6058 except EnvironmentError as e:
6059 raise XAttrMetadataError(e.errno, e.strerror)
6060 else:
6061 user_has_setfattr = check_executable('setfattr', ['--version'])
6062 user_has_xattr = check_executable('xattr', ['-h'])
6063
6064 if user_has_setfattr or user_has_xattr:
6065
6066 value = value.decode('utf-8')
6067 if user_has_setfattr:
6068 executable = 'setfattr'
6069 opts = ['-n', key, '-v', value]
6070 elif user_has_xattr:
6071 executable = 'xattr'
6072 opts = ['-w', key, value]
6073
6074 cmd = ([encodeFilename(executable, True)]
6075 + [encodeArgument(o) for o in opts]
6076 + [encodeFilename(path, True)])
6077
6078 try:
6079 p = subprocess.Popen(
6080 cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE, stdin=subprocess.PIPE)
6081 except EnvironmentError as e:
6082 raise XAttrMetadataError(e.errno, e.strerror)
f5b1bca9 6083 stdout, stderr = process_communicate_or_kill(p)
6084 stderr = stderr.decode('utf-8', 'replace')
6085 if p.returncode != 0:
6086 raise XAttrMetadataError(p.returncode, stderr)
6087
6088 else:
6089 # On Unix, but couldn't find pyxattr, setfattr, or xattr.
6090 if sys.platform.startswith('linux'):
6091 raise XAttrUnavailableError(
6092 "Couldn't find a tool to set the xattrs. "
6093 "Install either the python 'pyxattr' or 'xattr' "
6094 "modules, or the GNU 'attr' package "
6095 "(which contains the 'setfattr' tool).")
6096 else:
6097 raise XAttrUnavailableError(
6098 "Couldn't find a tool to set the xattrs. "
6099 "Install either the python 'xattr' module, "
6100 "or the 'xattr' binary.")
6101
6102
6103def random_birthday(year_field, month_field, day_field):
6104 start_date = datetime.date(1950, 1, 1)
6105 end_date = datetime.date(1995, 12, 31)
6106 offset = random.randint(0, (end_date - start_date).days)
6107 random_date = start_date + datetime.timedelta(offset)
0c265486 6108 return {
6109 year_field: str(random_date.year),
6110 month_field: str(random_date.month),
6111 day_field: str(random_date.day),
0c265486 6112 }
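# Example (illustrative): random_birthday('birth_year', 'birth_month', 'birth_day')
# might return {'birth_year': '1972', 'birth_month': '3', 'birth_day': '14'},
# i.e. a random date between 1950-01-01 and 1995-12-31 rendered as strings.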
732044af 6113
c76eb41b 6114
732044af 6115# Templates for internet shortcut files, which are plain text files.
6116DOT_URL_LINK_TEMPLATE = '''
6117[InternetShortcut]
6118URL=%(url)s
6119'''.lstrip()
6120
6121DOT_WEBLOC_LINK_TEMPLATE = '''
6122<?xml version="1.0" encoding="UTF-8"?>
6123<!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
6124<plist version="1.0">
6125<dict>
6126\t<key>URL</key>
6127\t<string>%(url)s</string>
6128</dict>
6129</plist>
6130'''.lstrip()
6131
6132DOT_DESKTOP_LINK_TEMPLATE = '''
6133[Desktop Entry]
6134Encoding=UTF-8
6135Name=%(filename)s
6136Type=Link
6137URL=%(url)s
6138Icon=text-html
6139'''.lstrip()
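# Example (illustrative): DOT_URL_LINK_TEMPLATE % {'url': 'https://example.com'}
# renders the .url payload '[InternetShortcut]\nURL=https://example.com\n'; the
# other templates are filled in the same way (.desktop also needs 'filename').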
6140
6141
6142def iri_to_uri(iri):
6143 """
6144 Converts an IRI (Internationalized Resource Identifier, allowing Unicode characters) to a URI (Uniform Resource Identifier, ASCII-only).
6145
6146 The function doesn't add an additional layer of escaping; e.g., it doesn't escape `%3C` as `%253C`. Instead, it percent-escapes characters with an underlying UTF-8 encoding *besides* those already escaped, leaving the URI intact.
6147 """
6148
6149 iri_parts = compat_urllib_parse_urlparse(iri)
6150
6151 if '[' in iri_parts.netloc:
6152 raise ValueError('IPv6 URIs are not yet supported.')
6153 # Querying `.netloc`, when there's only one bracket, also raises a ValueError.
6154
6155 # The `safe` argument values, that the following code uses, contain the characters that should not be percent-encoded. Everything else but letters, digits and '_.-' will be percent-encoded with an underlying UTF-8 encoding. Everything already percent-encoded will be left as is.
6156
6157 net_location = ''
6158 if iri_parts.username:
6159 net_location += compat_urllib_parse_quote(iri_parts.username, safe=r"!$%&'()*+,~")
6160 if iri_parts.password is not None:
6161 net_location += ':' + compat_urllib_parse_quote(iri_parts.password, safe=r"!$%&'()*+,~")
6162 net_location += '@'
6163
6164 net_location += iri_parts.hostname.encode('idna').decode('utf-8') # Punycode for Unicode hostnames.
6165 # The 'idna' encoding produces ASCII text.
6166 if iri_parts.port is not None and iri_parts.port != 80:
6167 net_location += ':' + str(iri_parts.port)
6168
6169 return compat_urllib_parse_urlunparse(
6170 (iri_parts.scheme,
6171 net_location,
6172
6173 compat_urllib_parse_quote_plus(iri_parts.path, safe=r"!$%&'()*+,/:;=@|~"),
6174
6175 # Unsure about the `safe` argument, since this is a legacy way of handling parameters.
6176 compat_urllib_parse_quote_plus(iri_parts.params, safe=r"!$%&'()*+,/:;=@|~"),
6177
6178 # Not totally sure about the `safe` argument, since the source does not explicitly mention the query URI component.
6179 compat_urllib_parse_quote_plus(iri_parts.query, safe=r"!$%&'()*+,/:;=?@{|}~"),
6180
6181 compat_urllib_parse_quote_plus(iri_parts.fragment, safe=r"!#$%&'()*+,/:;=?@{|}~")))
6182
6183 # Source for `safe` arguments: https://url.spec.whatwg.org/#percent-encoded-bytes.
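# Example (illustrative): iri_to_uri('http://example.com/é?q=münchen') returns
# 'http://example.com/%C3%A9?q=m%C3%BCnchen'; the non-ASCII characters are
# percent-encoded as UTF-8 while the already-ASCII structure is left untouched.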
6184
6185
6186def to_high_limit_path(path):
6187 if sys.platform in ['win32', 'cygwin']:
6188 # Work around MAX_PATH limitation on Windows. The maximum allowed length for the individual path segments may still be quite limited.
6189 return r'\\?\ '.rstrip() + os.path.abspath(path)
6190
6191 return path
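# Example (illustrative, Windows only): to_high_limit_path(r'C:\clips\a.mp4')
# returns r'\\?\C:\clips\a.mp4', which lifts the 260-character MAX_PATH limit;
# on other platforms the path is returned unchanged.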
76d321f6 6192
c76eb41b 6193
b868936c 6194def format_field(obj, field=None, template='%s', ignore=(None, ''), default='', func=None):
6195 if field is None:
6196 val = obj if obj is not None else default
6197 else:
6198 val = obj.get(field, default)
76d321f6 6199 if func and val not in ignore:
6200 val = func(val)
6201 return template % val if val not in ignore else default
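# Examples (illustrative): format_field(100, None, '%d views') == '100 views',
# while format_field({'uploader': None}, 'uploader', ' by %s') == '' because
# None is in the default ignore tuple, so the template is skipped.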
00dd0cd5 6202
6203
6204def clean_podcast_url(url):
6205 return re.sub(r'''(?x)
6206 (?:
6207 (?:
6208 chtbl\.com/track|
6209 media\.blubrry\.com| # https://create.blubrry.com/resources/podcast-media-download-statistics/getting-started/
6210 play\.podtrac\.com
6211 )/[^/]+|
6212 (?:dts|www)\.podtrac\.com/(?:pts/)?redirect\.[0-9a-z]{3,4}| # http://analytics.podtrac.com/how-to-measure
6213 flex\.acast\.com|
6214 pd(?:
6215 cn\.co| # https://podcorn.com/analytics-prefix/
6216 st\.fm # https://podsights.com/docs/
6217 )/e
6218 )/''', '', url)
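# Example (illustrative, with a hypothetical tracking prefix):
# clean_podcast_url('https://chtbl.com/track/ABC123/traffic.megaphone.fm/ep.mp3')
# == 'https://traffic.megaphone.fm/ep.mp3'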
6219
6220
6221_HEX_TABLE = '0123456789abcdef'
6222
6223
6224def random_uuidv4():
6225 return re.sub(r'[xy]', lambda x: _HEX_TABLE[random.randint(0, 15)], 'xxxxxxxx-xxxx-4xxx-yxxx-xxxxxxxxxxxx')
0202b52a 6226
6227
6228def make_dir(path, to_screen=None):
6229 try:
6230 dn = os.path.dirname(path)
6231 if dn and not os.path.exists(dn):
6232 os.makedirs(dn)
6233 return True
6234 except (OSError, IOError) as err:
6235 if callable(to_screen):
6236 to_screen('unable to create directory ' + error_to_compat_str(err))
6237 return False
f74980cb 6238
6239
6240def get_executable_path():
c552ae88 6241 from zipimport import zipimporter
6242 if hasattr(sys, 'frozen'): # Running from PyInstaller
6243 path = os.path.dirname(sys.executable)
6244 elif isinstance(globals().get('__loader__'), zipimporter): # Running from ZIP
6245 path = os.path.join(os.path.dirname(__file__), '../..')
6246 else:
6247 path = os.path.join(os.path.dirname(__file__), '..')
f74980cb 6248 return os.path.abspath(path)
6249
6250
2f567473 6251def load_plugins(name, suffix, namespace):
f74980cb 6252 plugin_info = [None]
6253 classes = []
6254 try:
6255 plugin_info = imp.find_module(
6256 name, [os.path.join(get_executable_path(), 'ytdlp_plugins')])
6257 plugins = imp.load_module(name, *plugin_info)
6258 for name in dir(plugins):
2f567473 6259 if name in namespace:
6260 continue
6261 if not name.endswith(suffix):
f74980cb 6262 continue
6263 klass = getattr(plugins, name)
6264 classes.append(klass)
6265 namespace[name] = klass
6266 except ImportError:
6267 pass
6268 finally:
6269 if plugin_info[0] is not None:
6270 plugin_info[0].close()
6271 return classes
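# Usage sketch (illustrative): classes defined in <executable dir>/ytdlp_plugins/<name>
# whose names end with `suffix` are copied into `namespace`, e.g. something like
# load_plugins('extractor', 'IE', globals()) from within an extractor module.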
06167fbb 6272
6273
325ebc17 6274def traverse_obj(
352d63fd 6275 obj, *path_list, default=None, expected_type=None, get_all=True,
325ebc17 6276 casesense=True, is_user_input=False, traverse_string=False):
324ad820 6277 ''' Traverse nested list/dict/tuple
8f334380 6278 @param path_list A list of paths which are checked one by one.
6279 Each path is a list of keys where each key is a string,
6280 a tuple of strings or "...". When a tuple is given,
6281 all the keys given in the tuple are traversed, and
6282 "..." traverses all the keys in the object
325ebc17 6283 @param default Default value to return
352d63fd 6284 @param expected_type Only accept final value of this type (Can also be any callable)
6285 @param get_all Return all the values obtained from a path or only the first one
324ad820 6286 @param casesense Whether to consider dictionary keys as case sensitive
6287 @param is_user_input Whether the keys are generated from user input. If True,
6288 strings are converted to int/slice if necessary
6289 @param traverse_string Whether to traverse inside strings. If True, any
6290 non-compatible object will also be converted into a string
8f334380 6291 # TODO: Write tests
324ad820 6292 '''
325ebc17 6293 if not casesense:
dbf5416a 6294 _lower = lambda k: (k.lower() if isinstance(k, str) else k)
8f334380 6295 path_list = (map(_lower, variadic(path)) for path in path_list)
6296
6297 def _traverse_obj(obj, path, _current_depth=0):
6298 nonlocal depth
575e17a1 6299 if obj is None:
6300 return None
8f334380 6301 path = tuple(variadic(path))
6302 for i, key in enumerate(path):
6303 if isinstance(key, (list, tuple)):
6304 obj = [_traverse_obj(obj, sub_key, _current_depth) for sub_key in key]
6305 key = ...
6306 if key is ...:
6307 obj = (obj.values() if isinstance(obj, dict)
6308 else obj if isinstance(obj, (list, tuple, LazyList))
6309 else str(obj) if traverse_string else [])
6310 _current_depth += 1
6311 depth = max(depth, _current_depth)
6312 return [_traverse_obj(inner_obj, path[i + 1:], _current_depth) for inner_obj in obj]
575e17a1 6313 elif isinstance(obj, dict) and not (is_user_input and key == ':'):
325ebc17 6314 obj = (obj.get(key) if casesense or (key in obj)
6315 else next((v for k, v in obj.items() if _lower(k) == key), None))
6316 else:
6317 if is_user_input:
6318 key = (int_or_none(key) if ':' not in key
6319 else slice(*map(int_or_none, key.split(':'))))
8f334380 6320 if key == slice(None):
575e17a1 6321 return _traverse_obj(obj, (..., *path[i + 1:]), _current_depth)
325ebc17 6322 if not isinstance(key, (int, slice)):
9fea350f 6323 return None
8f334380 6324 if not isinstance(obj, (list, tuple, LazyList)):
325ebc17 6325 if not traverse_string:
6326 return None
6327 obj = str(obj)
6328 try:
6329 obj = obj[key]
6330 except IndexError:
324ad820 6331 return None
325ebc17 6332 return obj
6333
352d63fd 6334 if isinstance(expected_type, type):
6335 type_test = lambda val: val if isinstance(val, expected_type) else None
6336 elif expected_type is not None:
6337 type_test = expected_type
6338 else:
6339 type_test = lambda val: val
6340
8f334380 6341 for path in path_list:
6342 depth = 0
6343 val = _traverse_obj(obj, path)
325ebc17 6344 if val is not None:
8f334380 6345 if depth:
6346 for _ in range(depth - 1):
6586bca9 6347 val = itertools.chain.from_iterable(v for v in val if v is not None)
352d63fd 6348 val = [v for v in map(type_test, val) if v is not None]
8f334380 6349 if val:
352d63fd 6350 return val if get_all else val[0]
6351 else:
6352 val = type_test(val)
6353 if val is not None:
8f334380 6354 return val
325ebc17 6355 return default
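    # Examples (illustrative):
    # traverse_obj({'a': [{'b': 1}, {'b': 2}]}, ('a', ..., 'b')) == [1, 2]
    # traverse_obj({'a': {'b': None}}, ('a', 'b'), ('a', 'c'), default=0) == 0
    # traverse_obj({'A': 1}, ('a',), casesense=False) == 1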
324ad820 6356
6357
6358def traverse_dict(dictn, keys, casesense=True):
6359 ''' For backward compatibility. Do not use '''
6360 return traverse_obj(dictn, keys, casesense=casesense,
6361 is_user_input=True, traverse_string=True)
6606817a 6362
6363
c634ad2a 6364def variadic(x, allowed_types=(str, bytes)):
cb89cfc1 6365 return x if isinstance(x, collections.abc.Iterable) and not isinstance(x, allowed_types) else (x,)
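# Examples (illustrative): variadic('spam') == ('spam',) since str is in the
# default allowed_types, while variadic(['spam', 'eggs']) is returned unchanged.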