#!/usr/bin/env python3
# coding: utf-8

from __future__ import unicode_literals

import base64
import binascii
import calendar
import codecs
import collections
import contextlib
import ctypes
import datetime
import email.utils
import email.header
import errno
import functools
import gzip
import imp
import io
import itertools
import json
import locale
import math
import operator
import os
import platform
import random
import re
import socket
import ssl
import subprocess
import sys
import tempfile
import time
import traceback
import xml.etree.ElementTree
import zlib

from .compat import (
    compat_HTMLParseError,
    compat_HTMLParser,
    compat_HTTPError,
    compat_basestring,
    compat_chr,
    compat_cookiejar,
    compat_ctypes_WINFUNCTYPE,
    compat_etree_fromstring,
    compat_expanduser,
    compat_html_entities,
    compat_html_entities_html5,
    compat_http_client,
    compat_integer_types,
    compat_numeric_types,
    compat_kwargs,
    compat_os_name,
    compat_parse_qs,
    compat_shlex_quote,
    compat_str,
    compat_struct_pack,
    compat_struct_unpack,
    compat_urllib_error,
    compat_urllib_parse,
    compat_urllib_parse_urlencode,
    compat_urllib_parse_urlparse,
    compat_urllib_parse_urlunparse,
    compat_urllib_parse_quote,
    compat_urllib_parse_quote_plus,
    compat_urllib_parse_unquote_plus,
    compat_urllib_request,
    compat_urlparse,
    compat_xpath,
)

from .socks import (
    ProxyType,
    sockssocket,
)


def register_socks_protocols():
    # "Register" SOCKS protocols
    # In Python < 2.6.5, urlsplit() suffers from bug https://bugs.python.org/issue7904
    # URLs with protocols not in urlparse.uses_netloc are not handled correctly
    for scheme in ('socks', 'socks4', 'socks4a', 'socks5'):
        if scheme not in compat_urlparse.uses_netloc:
            compat_urlparse.uses_netloc.append(scheme)


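# Illustrative effect (sketch, not from the original source): after register_socks_protocols()
# has been called, 'socks5' in compat_urlparse.uses_netloc is True, so URLs such as
# 'socks5://127.0.0.1:1080' get their netloc split out correctly on the affected Python versions.

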
# This is not clearly defined otherwise
compiled_regex_type = type(re.compile(''))


def random_user_agent():
    _USER_AGENT_TPL = 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/%s Safari/537.36'
    _CHROME_VERSIONS = (
97 '74.0.3729.129',
98 '76.0.3780.3',
99 '76.0.3780.2',
100 '74.0.3729.128',
101 '76.0.3780.1',
102 '76.0.3780.0',
103 '75.0.3770.15',
104 '74.0.3729.127',
105 '74.0.3729.126',
106 '76.0.3779.1',
107 '76.0.3779.0',
108 '75.0.3770.14',
109 '74.0.3729.125',
110 '76.0.3778.1',
111 '76.0.3778.0',
112 '75.0.3770.13',
113 '74.0.3729.124',
114 '74.0.3729.123',
115 '73.0.3683.121',
116 '76.0.3777.1',
117 '76.0.3777.0',
118 '75.0.3770.12',
119 '74.0.3729.122',
120 '76.0.3776.4',
121 '75.0.3770.11',
122 '74.0.3729.121',
123 '76.0.3776.3',
124 '76.0.3776.2',
125 '73.0.3683.120',
126 '74.0.3729.120',
127 '74.0.3729.119',
128 '74.0.3729.118',
129 '76.0.3776.1',
130 '76.0.3776.0',
131 '76.0.3775.5',
132 '75.0.3770.10',
133 '74.0.3729.117',
134 '76.0.3775.4',
135 '76.0.3775.3',
136 '74.0.3729.116',
137 '75.0.3770.9',
138 '76.0.3775.2',
139 '76.0.3775.1',
140 '76.0.3775.0',
141 '75.0.3770.8',
142 '74.0.3729.115',
143 '74.0.3729.114',
144 '76.0.3774.1',
145 '76.0.3774.0',
146 '75.0.3770.7',
147 '74.0.3729.113',
148 '74.0.3729.112',
149 '74.0.3729.111',
150 '76.0.3773.1',
151 '76.0.3773.0',
152 '75.0.3770.6',
153 '74.0.3729.110',
154 '74.0.3729.109',
155 '76.0.3772.1',
156 '76.0.3772.0',
157 '75.0.3770.5',
158 '74.0.3729.108',
159 '74.0.3729.107',
160 '76.0.3771.1',
161 '76.0.3771.0',
162 '75.0.3770.4',
163 '74.0.3729.106',
164 '74.0.3729.105',
165 '75.0.3770.3',
166 '74.0.3729.104',
167 '74.0.3729.103',
168 '74.0.3729.102',
169 '75.0.3770.2',
170 '74.0.3729.101',
171 '75.0.3770.1',
172 '75.0.3770.0',
173 '74.0.3729.100',
174 '75.0.3769.5',
175 '75.0.3769.4',
176 '74.0.3729.99',
177 '75.0.3769.3',
178 '75.0.3769.2',
179 '75.0.3768.6',
180 '74.0.3729.98',
181 '75.0.3769.1',
182 '75.0.3769.0',
183 '74.0.3729.97',
184 '73.0.3683.119',
185 '73.0.3683.118',
186 '74.0.3729.96',
187 '75.0.3768.5',
188 '75.0.3768.4',
189 '75.0.3768.3',
190 '75.0.3768.2',
191 '74.0.3729.95',
192 '74.0.3729.94',
193 '75.0.3768.1',
194 '75.0.3768.0',
195 '74.0.3729.93',
196 '74.0.3729.92',
197 '73.0.3683.117',
198 '74.0.3729.91',
199 '75.0.3766.3',
200 '74.0.3729.90',
201 '75.0.3767.2',
202 '75.0.3767.1',
203 '75.0.3767.0',
204 '74.0.3729.89',
205 '73.0.3683.116',
206 '75.0.3766.2',
207 '74.0.3729.88',
208 '75.0.3766.1',
209 '75.0.3766.0',
210 '74.0.3729.87',
211 '73.0.3683.115',
212 '74.0.3729.86',
213 '75.0.3765.1',
214 '75.0.3765.0',
215 '74.0.3729.85',
216 '73.0.3683.114',
217 '74.0.3729.84',
218 '75.0.3764.1',
219 '75.0.3764.0',
220 '74.0.3729.83',
221 '73.0.3683.113',
222 '75.0.3763.2',
223 '75.0.3761.4',
224 '74.0.3729.82',
225 '75.0.3763.1',
226 '75.0.3763.0',
227 '74.0.3729.81',
228 '73.0.3683.112',
229 '75.0.3762.1',
230 '75.0.3762.0',
231 '74.0.3729.80',
232 '75.0.3761.3',
233 '74.0.3729.79',
234 '73.0.3683.111',
235 '75.0.3761.2',
236 '74.0.3729.78',
237 '74.0.3729.77',
238 '75.0.3761.1',
239 '75.0.3761.0',
240 '73.0.3683.110',
241 '74.0.3729.76',
242 '74.0.3729.75',
243 '75.0.3760.0',
244 '74.0.3729.74',
245 '75.0.3759.8',
246 '75.0.3759.7',
247 '75.0.3759.6',
248 '74.0.3729.73',
249 '75.0.3759.5',
250 '74.0.3729.72',
251 '73.0.3683.109',
252 '75.0.3759.4',
253 '75.0.3759.3',
254 '74.0.3729.71',
255 '75.0.3759.2',
256 '74.0.3729.70',
257 '73.0.3683.108',
258 '74.0.3729.69',
259 '75.0.3759.1',
260 '75.0.3759.0',
261 '74.0.3729.68',
262 '73.0.3683.107',
263 '74.0.3729.67',
264 '75.0.3758.1',
265 '75.0.3758.0',
266 '74.0.3729.66',
267 '73.0.3683.106',
268 '74.0.3729.65',
269 '75.0.3757.1',
270 '75.0.3757.0',
271 '74.0.3729.64',
272 '73.0.3683.105',
273 '74.0.3729.63',
274 '75.0.3756.1',
275 '75.0.3756.0',
276 '74.0.3729.62',
277 '73.0.3683.104',
278 '75.0.3755.3',
279 '75.0.3755.2',
280 '73.0.3683.103',
281 '75.0.3755.1',
282 '75.0.3755.0',
283 '74.0.3729.61',
284 '73.0.3683.102',
285 '74.0.3729.60',
286 '75.0.3754.2',
287 '74.0.3729.59',
288 '75.0.3753.4',
289 '74.0.3729.58',
290 '75.0.3754.1',
291 '75.0.3754.0',
292 '74.0.3729.57',
293 '73.0.3683.101',
294 '75.0.3753.3',
295 '75.0.3752.2',
296 '75.0.3753.2',
297 '74.0.3729.56',
298 '75.0.3753.1',
299 '75.0.3753.0',
300 '74.0.3729.55',
301 '73.0.3683.100',
302 '74.0.3729.54',
303 '75.0.3752.1',
304 '75.0.3752.0',
305 '74.0.3729.53',
306 '73.0.3683.99',
307 '74.0.3729.52',
308 '75.0.3751.1',
309 '75.0.3751.0',
310 '74.0.3729.51',
311 '73.0.3683.98',
312 '74.0.3729.50',
313 '75.0.3750.0',
314 '74.0.3729.49',
315 '74.0.3729.48',
316 '74.0.3729.47',
317 '75.0.3749.3',
318 '74.0.3729.46',
319 '73.0.3683.97',
320 '75.0.3749.2',
321 '74.0.3729.45',
322 '75.0.3749.1',
323 '75.0.3749.0',
324 '74.0.3729.44',
325 '73.0.3683.96',
326 '74.0.3729.43',
327 '74.0.3729.42',
328 '75.0.3748.1',
329 '75.0.3748.0',
330 '74.0.3729.41',
331 '75.0.3747.1',
332 '73.0.3683.95',
333 '75.0.3746.4',
334 '74.0.3729.40',
335 '74.0.3729.39',
336 '75.0.3747.0',
337 '75.0.3746.3',
338 '75.0.3746.2',
339 '74.0.3729.38',
340 '75.0.3746.1',
341 '75.0.3746.0',
342 '74.0.3729.37',
343 '73.0.3683.94',
344 '75.0.3745.5',
345 '75.0.3745.4',
346 '75.0.3745.3',
347 '75.0.3745.2',
348 '74.0.3729.36',
349 '75.0.3745.1',
350 '75.0.3745.0',
351 '75.0.3744.2',
352 '74.0.3729.35',
353 '73.0.3683.93',
354 '74.0.3729.34',
355 '75.0.3744.1',
356 '75.0.3744.0',
357 '74.0.3729.33',
358 '73.0.3683.92',
359 '74.0.3729.32',
360 '74.0.3729.31',
361 '73.0.3683.91',
362 '75.0.3741.2',
363 '75.0.3740.5',
364 '74.0.3729.30',
365 '75.0.3741.1',
366 '75.0.3741.0',
367 '74.0.3729.29',
368 '75.0.3740.4',
369 '73.0.3683.90',
370 '74.0.3729.28',
371 '75.0.3740.3',
372 '73.0.3683.89',
373 '75.0.3740.2',
374 '74.0.3729.27',
375 '75.0.3740.1',
376 '75.0.3740.0',
377 '74.0.3729.26',
378 '73.0.3683.88',
379 '73.0.3683.87',
380 '74.0.3729.25',
381 '75.0.3739.1',
382 '75.0.3739.0',
383 '73.0.3683.86',
384 '74.0.3729.24',
385 '73.0.3683.85',
386 '75.0.3738.4',
387 '75.0.3738.3',
388 '75.0.3738.2',
389 '75.0.3738.1',
390 '75.0.3738.0',
391 '74.0.3729.23',
392 '73.0.3683.84',
393 '74.0.3729.22',
394 '74.0.3729.21',
395 '75.0.3737.1',
396 '75.0.3737.0',
397 '74.0.3729.20',
398 '73.0.3683.83',
399 '74.0.3729.19',
400 '75.0.3736.1',
401 '75.0.3736.0',
402 '74.0.3729.18',
403 '73.0.3683.82',
404 '74.0.3729.17',
405 '75.0.3735.1',
406 '75.0.3735.0',
407 '74.0.3729.16',
408 '73.0.3683.81',
409 '75.0.3734.1',
410 '75.0.3734.0',
411 '74.0.3729.15',
412 '73.0.3683.80',
413 '74.0.3729.14',
414 '75.0.3733.1',
415 '75.0.3733.0',
416 '75.0.3732.1',
417 '74.0.3729.13',
418 '74.0.3729.12',
419 '73.0.3683.79',
420 '74.0.3729.11',
421 '75.0.3732.0',
422 '74.0.3729.10',
423 '73.0.3683.78',
424 '74.0.3729.9',
425 '74.0.3729.8',
426 '74.0.3729.7',
427 '75.0.3731.3',
428 '75.0.3731.2',
429 '75.0.3731.0',
430 '74.0.3729.6',
431 '73.0.3683.77',
432 '73.0.3683.76',
433 '75.0.3730.5',
434 '75.0.3730.4',
435 '73.0.3683.75',
436 '74.0.3729.5',
437 '73.0.3683.74',
438 '75.0.3730.3',
439 '75.0.3730.2',
440 '74.0.3729.4',
441 '73.0.3683.73',
442 '73.0.3683.72',
443 '75.0.3730.1',
444 '75.0.3730.0',
445 '74.0.3729.3',
446 '73.0.3683.71',
447 '74.0.3729.2',
448 '73.0.3683.70',
449 '74.0.3729.1',
450 '74.0.3729.0',
451 '74.0.3726.4',
452 '73.0.3683.69',
453 '74.0.3726.3',
454 '74.0.3728.0',
455 '74.0.3726.2',
456 '73.0.3683.68',
457 '74.0.3726.1',
458 '74.0.3726.0',
459 '74.0.3725.4',
460 '73.0.3683.67',
461 '73.0.3683.66',
462 '74.0.3725.3',
463 '74.0.3725.2',
464 '74.0.3725.1',
465 '74.0.3724.8',
466 '74.0.3725.0',
467 '73.0.3683.65',
468 '74.0.3724.7',
469 '74.0.3724.6',
470 '74.0.3724.5',
471 '74.0.3724.4',
472 '74.0.3724.3',
473 '74.0.3724.2',
474 '74.0.3724.1',
475 '74.0.3724.0',
476 '73.0.3683.64',
477 '74.0.3723.1',
478 '74.0.3723.0',
479 '73.0.3683.63',
480 '74.0.3722.1',
481 '74.0.3722.0',
482 '73.0.3683.62',
483 '74.0.3718.9',
484 '74.0.3702.3',
485 '74.0.3721.3',
486 '74.0.3721.2',
487 '74.0.3721.1',
488 '74.0.3721.0',
489 '74.0.3720.6',
490 '73.0.3683.61',
491 '72.0.3626.122',
492 '73.0.3683.60',
493 '74.0.3720.5',
494 '72.0.3626.121',
495 '74.0.3718.8',
496 '74.0.3720.4',
497 '74.0.3720.3',
498 '74.0.3718.7',
499 '74.0.3720.2',
500 '74.0.3720.1',
501 '74.0.3720.0',
502 '74.0.3718.6',
503 '74.0.3719.5',
504 '73.0.3683.59',
505 '74.0.3718.5',
506 '74.0.3718.4',
507 '74.0.3719.4',
508 '74.0.3719.3',
509 '74.0.3719.2',
510 '74.0.3719.1',
511 '73.0.3683.58',
512 '74.0.3719.0',
513 '73.0.3683.57',
514 '73.0.3683.56',
515 '74.0.3718.3',
516 '73.0.3683.55',
517 '74.0.3718.2',
518 '74.0.3718.1',
519 '74.0.3718.0',
520 '73.0.3683.54',
521 '74.0.3717.2',
522 '73.0.3683.53',
523 '74.0.3717.1',
524 '74.0.3717.0',
525 '73.0.3683.52',
526 '74.0.3716.1',
527 '74.0.3716.0',
528 '73.0.3683.51',
529 '74.0.3715.1',
530 '74.0.3715.0',
531 '73.0.3683.50',
532 '74.0.3711.2',
533 '74.0.3714.2',
534 '74.0.3713.3',
535 '74.0.3714.1',
536 '74.0.3714.0',
537 '73.0.3683.49',
538 '74.0.3713.1',
539 '74.0.3713.0',
540 '72.0.3626.120',
541 '73.0.3683.48',
542 '74.0.3712.2',
543 '74.0.3712.1',
544 '74.0.3712.0',
545 '73.0.3683.47',
546 '72.0.3626.119',
547 '73.0.3683.46',
548 '74.0.3710.2',
549 '72.0.3626.118',
550 '74.0.3711.1',
551 '74.0.3711.0',
552 '73.0.3683.45',
553 '72.0.3626.117',
554 '74.0.3710.1',
555 '74.0.3710.0',
556 '73.0.3683.44',
557 '72.0.3626.116',
558 '74.0.3709.1',
559 '74.0.3709.0',
560 '74.0.3704.9',
561 '73.0.3683.43',
562 '72.0.3626.115',
563 '74.0.3704.8',
564 '74.0.3704.7',
565 '74.0.3708.0',
566 '74.0.3706.7',
567 '74.0.3704.6',
568 '73.0.3683.42',
569 '72.0.3626.114',
570 '74.0.3706.6',
571 '72.0.3626.113',
572 '74.0.3704.5',
573 '74.0.3706.5',
574 '74.0.3706.4',
575 '74.0.3706.3',
576 '74.0.3706.2',
577 '74.0.3706.1',
578 '74.0.3706.0',
579 '73.0.3683.41',
580 '72.0.3626.112',
581 '74.0.3705.1',
582 '74.0.3705.0',
583 '73.0.3683.40',
584 '72.0.3626.111',
585 '73.0.3683.39',
586 '74.0.3704.4',
587 '73.0.3683.38',
588 '74.0.3704.3',
589 '74.0.3704.2',
590 '74.0.3704.1',
591 '74.0.3704.0',
592 '73.0.3683.37',
593 '72.0.3626.110',
594 '72.0.3626.109',
595 '74.0.3703.3',
596 '74.0.3703.2',
597 '73.0.3683.36',
598 '74.0.3703.1',
599 '74.0.3703.0',
600 '73.0.3683.35',
601 '72.0.3626.108',
602 '74.0.3702.2',
603 '74.0.3699.3',
604 '74.0.3702.1',
605 '74.0.3702.0',
606 '73.0.3683.34',
607 '72.0.3626.107',
608 '73.0.3683.33',
609 '74.0.3701.1',
610 '74.0.3701.0',
611 '73.0.3683.32',
612 '73.0.3683.31',
613 '72.0.3626.105',
614 '74.0.3700.1',
615 '74.0.3700.0',
616 '73.0.3683.29',
617 '72.0.3626.103',
618 '74.0.3699.2',
619 '74.0.3699.1',
620 '74.0.3699.0',
621 '73.0.3683.28',
622 '72.0.3626.102',
623 '73.0.3683.27',
624 '73.0.3683.26',
625 '74.0.3698.0',
626 '74.0.3696.2',
627 '72.0.3626.101',
628 '73.0.3683.25',
629 '74.0.3696.1',
630 '74.0.3696.0',
631 '74.0.3694.8',
632 '72.0.3626.100',
633 '74.0.3694.7',
634 '74.0.3694.6',
635 '74.0.3694.5',
636 '74.0.3694.4',
637 '72.0.3626.99',
638 '72.0.3626.98',
639 '74.0.3694.3',
640 '73.0.3683.24',
641 '72.0.3626.97',
642 '72.0.3626.96',
643 '72.0.3626.95',
644 '73.0.3683.23',
645 '72.0.3626.94',
646 '73.0.3683.22',
647 '73.0.3683.21',
648 '72.0.3626.93',
649 '74.0.3694.2',
650 '72.0.3626.92',
651 '74.0.3694.1',
652 '74.0.3694.0',
653 '74.0.3693.6',
654 '73.0.3683.20',
655 '72.0.3626.91',
656 '74.0.3693.5',
657 '74.0.3693.4',
658 '74.0.3693.3',
659 '74.0.3693.2',
660 '73.0.3683.19',
661 '74.0.3693.1',
662 '74.0.3693.0',
663 '73.0.3683.18',
664 '72.0.3626.90',
665 '74.0.3692.1',
666 '74.0.3692.0',
667 '73.0.3683.17',
668 '72.0.3626.89',
669 '74.0.3687.3',
670 '74.0.3691.1',
671 '74.0.3691.0',
672 '73.0.3683.16',
673 '72.0.3626.88',
674 '72.0.3626.87',
675 '73.0.3683.15',
676 '74.0.3690.1',
677 '74.0.3690.0',
678 '73.0.3683.14',
679 '72.0.3626.86',
680 '73.0.3683.13',
681 '73.0.3683.12',
682 '74.0.3689.1',
683 '74.0.3689.0',
684 '73.0.3683.11',
685 '72.0.3626.85',
686 '73.0.3683.10',
687 '72.0.3626.84',
688 '73.0.3683.9',
689 '74.0.3688.1',
690 '74.0.3688.0',
691 '73.0.3683.8',
692 '72.0.3626.83',
693 '74.0.3687.2',
694 '74.0.3687.1',
695 '74.0.3687.0',
696 '73.0.3683.7',
697 '72.0.3626.82',
698 '74.0.3686.4',
699 '72.0.3626.81',
700 '74.0.3686.3',
701 '74.0.3686.2',
702 '74.0.3686.1',
703 '74.0.3686.0',
704 '73.0.3683.6',
705 '72.0.3626.80',
706 '74.0.3685.1',
707 '74.0.3685.0',
708 '73.0.3683.5',
709 '72.0.3626.79',
710 '74.0.3684.1',
711 '74.0.3684.0',
712 '73.0.3683.4',
713 '72.0.3626.78',
714 '72.0.3626.77',
715 '73.0.3683.3',
716 '73.0.3683.2',
717 '72.0.3626.76',
718 '73.0.3683.1',
719 '73.0.3683.0',
720 '72.0.3626.75',
721 '71.0.3578.141',
722 '73.0.3682.1',
723 '73.0.3682.0',
724 '72.0.3626.74',
725 '71.0.3578.140',
726 '73.0.3681.4',
727 '73.0.3681.3',
728 '73.0.3681.2',
729 '73.0.3681.1',
730 '73.0.3681.0',
731 '72.0.3626.73',
732 '71.0.3578.139',
733 '72.0.3626.72',
734 '72.0.3626.71',
735 '73.0.3680.1',
736 '73.0.3680.0',
737 '72.0.3626.70',
738 '71.0.3578.138',
739 '73.0.3678.2',
740 '73.0.3679.1',
741 '73.0.3679.0',
742 '72.0.3626.69',
743 '71.0.3578.137',
744 '73.0.3678.1',
745 '73.0.3678.0',
746 '71.0.3578.136',
747 '73.0.3677.1',
748 '73.0.3677.0',
749 '72.0.3626.68',
750 '72.0.3626.67',
751 '71.0.3578.135',
752 '73.0.3676.1',
753 '73.0.3676.0',
754 '73.0.3674.2',
755 '72.0.3626.66',
756 '71.0.3578.134',
757 '73.0.3674.1',
758 '73.0.3674.0',
759 '72.0.3626.65',
760 '71.0.3578.133',
761 '73.0.3673.2',
762 '73.0.3673.1',
763 '73.0.3673.0',
764 '72.0.3626.64',
765 '71.0.3578.132',
766 '72.0.3626.63',
767 '72.0.3626.62',
768 '72.0.3626.61',
769 '72.0.3626.60',
770 '73.0.3672.1',
771 '73.0.3672.0',
772 '72.0.3626.59',
773 '71.0.3578.131',
774 '73.0.3671.3',
775 '73.0.3671.2',
776 '73.0.3671.1',
777 '73.0.3671.0',
778 '72.0.3626.58',
779 '71.0.3578.130',
780 '73.0.3670.1',
781 '73.0.3670.0',
782 '72.0.3626.57',
783 '71.0.3578.129',
784 '73.0.3669.1',
785 '73.0.3669.0',
786 '72.0.3626.56',
787 '71.0.3578.128',
788 '73.0.3668.2',
789 '73.0.3668.1',
790 '73.0.3668.0',
791 '72.0.3626.55',
792 '71.0.3578.127',
793 '73.0.3667.2',
794 '73.0.3667.1',
795 '73.0.3667.0',
796 '72.0.3626.54',
797 '71.0.3578.126',
798 '73.0.3666.1',
799 '73.0.3666.0',
800 '72.0.3626.53',
801 '71.0.3578.125',
802 '73.0.3665.4',
803 '73.0.3665.3',
804 '72.0.3626.52',
805 '73.0.3665.2',
806 '73.0.3664.4',
807 '73.0.3665.1',
808 '73.0.3665.0',
809 '72.0.3626.51',
810 '71.0.3578.124',
811 '72.0.3626.50',
812 '73.0.3664.3',
813 '73.0.3664.2',
814 '73.0.3664.1',
815 '73.0.3664.0',
816 '73.0.3663.2',
817 '72.0.3626.49',
818 '71.0.3578.123',
819 '73.0.3663.1',
820 '73.0.3663.0',
821 '72.0.3626.48',
822 '71.0.3578.122',
823 '73.0.3662.1',
824 '73.0.3662.0',
825 '72.0.3626.47',
826 '71.0.3578.121',
827 '73.0.3661.1',
828 '72.0.3626.46',
829 '73.0.3661.0',
830 '72.0.3626.45',
831 '71.0.3578.120',
832 '73.0.3660.2',
833 '73.0.3660.1',
834 '73.0.3660.0',
835 '72.0.3626.44',
836 '71.0.3578.119',
837 '73.0.3659.1',
838 '73.0.3659.0',
839 '72.0.3626.43',
840 '71.0.3578.118',
841 '73.0.3658.1',
842 '73.0.3658.0',
843 '72.0.3626.42',
844 '71.0.3578.117',
845 '73.0.3657.1',
846 '73.0.3657.0',
847 '72.0.3626.41',
848 '71.0.3578.116',
849 '73.0.3656.1',
850 '73.0.3656.0',
851 '72.0.3626.40',
852 '71.0.3578.115',
853 '73.0.3655.1',
854 '73.0.3655.0',
855 '72.0.3626.39',
856 '71.0.3578.114',
857 '73.0.3654.1',
858 '73.0.3654.0',
859 '72.0.3626.38',
860 '71.0.3578.113',
861 '73.0.3653.1',
862 '73.0.3653.0',
863 '72.0.3626.37',
864 '71.0.3578.112',
865 '73.0.3652.1',
866 '73.0.3652.0',
867 '72.0.3626.36',
868 '71.0.3578.111',
869 '73.0.3651.1',
870 '73.0.3651.0',
871 '72.0.3626.35',
872 '71.0.3578.110',
873 '73.0.3650.1',
874 '73.0.3650.0',
875 '72.0.3626.34',
876 '71.0.3578.109',
877 '73.0.3649.1',
878 '73.0.3649.0',
879 '72.0.3626.33',
880 '71.0.3578.108',
881 '73.0.3648.2',
882 '73.0.3648.1',
883 '73.0.3648.0',
884 '72.0.3626.32',
885 '71.0.3578.107',
886 '73.0.3647.2',
887 '73.0.3647.1',
888 '73.0.3647.0',
889 '72.0.3626.31',
890 '71.0.3578.106',
891 '73.0.3635.3',
892 '73.0.3646.2',
893 '73.0.3646.1',
894 '73.0.3646.0',
895 '72.0.3626.30',
896 '71.0.3578.105',
897 '72.0.3626.29',
898 '73.0.3645.2',
899 '73.0.3645.1',
900 '73.0.3645.0',
901 '72.0.3626.28',
902 '71.0.3578.104',
903 '72.0.3626.27',
904 '72.0.3626.26',
905 '72.0.3626.25',
906 '72.0.3626.24',
907 '73.0.3644.0',
908 '73.0.3643.2',
909 '72.0.3626.23',
910 '71.0.3578.103',
911 '73.0.3643.1',
912 '73.0.3643.0',
913 '72.0.3626.22',
914 '71.0.3578.102',
915 '73.0.3642.1',
916 '73.0.3642.0',
917 '72.0.3626.21',
918 '71.0.3578.101',
919 '73.0.3641.1',
920 '73.0.3641.0',
921 '72.0.3626.20',
922 '71.0.3578.100',
923 '72.0.3626.19',
924 '73.0.3640.1',
925 '73.0.3640.0',
926 '72.0.3626.18',
927 '73.0.3639.1',
928 '71.0.3578.99',
929 '73.0.3639.0',
930 '72.0.3626.17',
931 '73.0.3638.2',
932 '72.0.3626.16',
933 '73.0.3638.1',
934 '73.0.3638.0',
935 '72.0.3626.15',
936 '71.0.3578.98',
937 '73.0.3635.2',
938 '71.0.3578.97',
939 '73.0.3637.1',
940 '73.0.3637.0',
941 '72.0.3626.14',
942 '71.0.3578.96',
943 '71.0.3578.95',
944 '72.0.3626.13',
945 '71.0.3578.94',
946 '73.0.3636.2',
947 '71.0.3578.93',
948 '73.0.3636.1',
949 '73.0.3636.0',
950 '72.0.3626.12',
951 '71.0.3578.92',
952 '73.0.3635.1',
953 '73.0.3635.0',
954 '72.0.3626.11',
955 '71.0.3578.91',
956 '73.0.3634.2',
957 '73.0.3634.1',
958 '73.0.3634.0',
959 '72.0.3626.10',
960 '71.0.3578.90',
961 '71.0.3578.89',
962 '73.0.3633.2',
963 '73.0.3633.1',
964 '73.0.3633.0',
965 '72.0.3610.4',
966 '72.0.3626.9',
967 '71.0.3578.88',
968 '73.0.3632.5',
969 '73.0.3632.4',
970 '73.0.3632.3',
971 '73.0.3632.2',
972 '73.0.3632.1',
973 '73.0.3632.0',
974 '72.0.3626.8',
975 '71.0.3578.87',
976 '73.0.3631.2',
977 '73.0.3631.1',
978 '73.0.3631.0',
979 '72.0.3626.7',
980 '71.0.3578.86',
981 '72.0.3626.6',
982 '73.0.3630.1',
983 '73.0.3630.0',
984 '72.0.3626.5',
985 '71.0.3578.85',
986 '72.0.3626.4',
987 '73.0.3628.3',
988 '73.0.3628.2',
989 '73.0.3629.1',
990 '73.0.3629.0',
991 '72.0.3626.3',
992 '71.0.3578.84',
993 '73.0.3628.1',
994 '73.0.3628.0',
995 '71.0.3578.83',
996 '73.0.3627.1',
997 '73.0.3627.0',
998 '72.0.3626.2',
999 '71.0.3578.82',
1000 '71.0.3578.81',
1001 '71.0.3578.80',
1002 '72.0.3626.1',
1003 '72.0.3626.0',
1004 '71.0.3578.79',
1005 '70.0.3538.124',
1006 '71.0.3578.78',
1007 '72.0.3623.4',
1008 '72.0.3625.2',
1009 '72.0.3625.1',
1010 '72.0.3625.0',
1011 '71.0.3578.77',
1012 '70.0.3538.123',
1013 '72.0.3624.4',
1014 '72.0.3624.3',
1015 '72.0.3624.2',
1016 '71.0.3578.76',
1017 '72.0.3624.1',
1018 '72.0.3624.0',
1019 '72.0.3623.3',
1020 '71.0.3578.75',
1021 '70.0.3538.122',
1022 '71.0.3578.74',
1023 '72.0.3623.2',
1024 '72.0.3610.3',
1025 '72.0.3623.1',
1026 '72.0.3623.0',
1027 '72.0.3622.3',
1028 '72.0.3622.2',
1029 '71.0.3578.73',
1030 '70.0.3538.121',
1031 '72.0.3622.1',
1032 '72.0.3622.0',
1033 '71.0.3578.72',
1034 '70.0.3538.120',
1035 '72.0.3621.1',
1036 '72.0.3621.0',
1037 '71.0.3578.71',
1038 '70.0.3538.119',
1039 '72.0.3620.1',
1040 '72.0.3620.0',
1041 '71.0.3578.70',
1042 '70.0.3538.118',
1043 '71.0.3578.69',
1044 '72.0.3619.1',
1045 '72.0.3619.0',
1046 '71.0.3578.68',
1047 '70.0.3538.117',
1048 '71.0.3578.67',
1049 '72.0.3618.1',
1050 '72.0.3618.0',
1051 '71.0.3578.66',
1052 '70.0.3538.116',
1053 '72.0.3617.1',
1054 '72.0.3617.0',
1055 '71.0.3578.65',
1056 '70.0.3538.115',
1057 '72.0.3602.3',
1058 '71.0.3578.64',
1059 '72.0.3616.1',
1060 '72.0.3616.0',
1061 '71.0.3578.63',
1062 '70.0.3538.114',
1063 '71.0.3578.62',
1064 '72.0.3615.1',
1065 '72.0.3615.0',
1066 '71.0.3578.61',
1067 '70.0.3538.113',
1068 '72.0.3614.1',
1069 '72.0.3614.0',
1070 '71.0.3578.60',
1071 '70.0.3538.112',
1072 '72.0.3613.1',
1073 '72.0.3613.0',
1074 '71.0.3578.59',
1075 '70.0.3538.111',
1076 '72.0.3612.2',
1077 '72.0.3612.1',
1078 '72.0.3612.0',
1079 '70.0.3538.110',
1080 '71.0.3578.58',
1081 '70.0.3538.109',
1082 '72.0.3611.2',
1083 '72.0.3611.1',
1084 '72.0.3611.0',
1085 '71.0.3578.57',
1086 '70.0.3538.108',
1087 '72.0.3610.2',
1088 '71.0.3578.56',
1089 '71.0.3578.55',
1090 '72.0.3610.1',
1091 '72.0.3610.0',
1092 '71.0.3578.54',
1093 '70.0.3538.107',
1094 '71.0.3578.53',
1095 '72.0.3609.3',
1096 '71.0.3578.52',
1097 '72.0.3609.2',
1098 '71.0.3578.51',
1099 '72.0.3608.5',
1100 '72.0.3609.1',
1101 '72.0.3609.0',
1102 '71.0.3578.50',
1103 '70.0.3538.106',
1104 '72.0.3608.4',
1105 '72.0.3608.3',
1106 '72.0.3608.2',
1107 '71.0.3578.49',
1108 '72.0.3608.1',
1109 '72.0.3608.0',
1110 '70.0.3538.105',
1111 '71.0.3578.48',
1112 '72.0.3607.1',
1113 '72.0.3607.0',
1114 '71.0.3578.47',
1115 '70.0.3538.104',
1116 '72.0.3606.2',
1117 '72.0.3606.1',
1118 '72.0.3606.0',
1119 '71.0.3578.46',
1120 '70.0.3538.103',
1121 '70.0.3538.102',
1122 '72.0.3605.3',
1123 '72.0.3605.2',
1124 '72.0.3605.1',
1125 '72.0.3605.0',
1126 '71.0.3578.45',
1127 '70.0.3538.101',
1128 '71.0.3578.44',
1129 '71.0.3578.43',
1130 '70.0.3538.100',
1131 '70.0.3538.99',
1132 '71.0.3578.42',
1133 '72.0.3604.1',
1134 '72.0.3604.0',
1135 '71.0.3578.41',
1136 '70.0.3538.98',
1137 '71.0.3578.40',
1138 '72.0.3603.2',
1139 '72.0.3603.1',
1140 '72.0.3603.0',
1141 '71.0.3578.39',
1142 '70.0.3538.97',
1143 '72.0.3602.2',
1144 '71.0.3578.38',
1145 '71.0.3578.37',
1146 '72.0.3602.1',
1147 '72.0.3602.0',
1148 '71.0.3578.36',
1149 '70.0.3538.96',
1150 '72.0.3601.1',
1151 '72.0.3601.0',
1152 '71.0.3578.35',
1153 '70.0.3538.95',
1154 '72.0.3600.1',
1155 '72.0.3600.0',
1156 '71.0.3578.34',
1157 '70.0.3538.94',
1158 '72.0.3599.3',
1159 '72.0.3599.2',
1160 '72.0.3599.1',
1161 '72.0.3599.0',
1162 '71.0.3578.33',
1163 '70.0.3538.93',
1164 '72.0.3598.1',
1165 '72.0.3598.0',
1166 '71.0.3578.32',
1167 '70.0.3538.87',
1168 '72.0.3597.1',
1169 '72.0.3597.0',
1170 '72.0.3596.2',
1171 '71.0.3578.31',
1172 '70.0.3538.86',
1173 '71.0.3578.30',
1174 '71.0.3578.29',
1175 '72.0.3596.1',
1176 '72.0.3596.0',
1177 '71.0.3578.28',
1178 '70.0.3538.85',
1179 '72.0.3595.2',
1180 '72.0.3591.3',
1181 '72.0.3595.1',
1182 '72.0.3595.0',
1183 '71.0.3578.27',
1184 '70.0.3538.84',
1185 '72.0.3594.1',
1186 '72.0.3594.0',
1187 '71.0.3578.26',
1188 '70.0.3538.83',
1189 '72.0.3593.2',
1190 '72.0.3593.1',
1191 '72.0.3593.0',
1192 '71.0.3578.25',
1193 '70.0.3538.82',
1194 '72.0.3589.3',
1195 '72.0.3592.2',
1196 '72.0.3592.1',
1197 '72.0.3592.0',
1198 '71.0.3578.24',
1199 '72.0.3589.2',
1200 '70.0.3538.81',
1201 '70.0.3538.80',
1202 '72.0.3591.2',
1203 '72.0.3591.1',
1204 '72.0.3591.0',
1205 '71.0.3578.23',
1206 '70.0.3538.79',
1207 '71.0.3578.22',
1208 '72.0.3590.1',
1209 '72.0.3590.0',
1210 '71.0.3578.21',
1211 '70.0.3538.78',
1212 '70.0.3538.77',
1213 '72.0.3589.1',
1214 '72.0.3589.0',
1215 '71.0.3578.20',
1216 '70.0.3538.76',
1217 '71.0.3578.19',
1218 '70.0.3538.75',
1219 '72.0.3588.1',
1220 '72.0.3588.0',
1221 '71.0.3578.18',
1222 '70.0.3538.74',
1223 '72.0.3586.2',
1224 '72.0.3587.0',
1225 '71.0.3578.17',
1226 '70.0.3538.73',
1227 '72.0.3586.1',
1228 '72.0.3586.0',
1229 '71.0.3578.16',
1230 '70.0.3538.72',
1231 '72.0.3585.1',
1232 '72.0.3585.0',
1233 '71.0.3578.15',
1234 '70.0.3538.71',
1235 '71.0.3578.14',
1236 '72.0.3584.1',
1237 '72.0.3584.0',
1238 '71.0.3578.13',
1239 '70.0.3538.70',
1240 '72.0.3583.2',
1241 '71.0.3578.12',
1242 '72.0.3583.1',
1243 '72.0.3583.0',
1244 '71.0.3578.11',
1245 '70.0.3538.69',
1246 '71.0.3578.10',
1247 '72.0.3582.0',
1248 '72.0.3581.4',
1249 '71.0.3578.9',
1250 '70.0.3538.67',
1251 '72.0.3581.3',
1252 '72.0.3581.2',
1253 '72.0.3581.1',
1254 '72.0.3581.0',
1255 '71.0.3578.8',
1256 '70.0.3538.66',
1257 '72.0.3580.1',
1258 '72.0.3580.0',
1259 '71.0.3578.7',
1260 '70.0.3538.65',
1261 '71.0.3578.6',
1262 '72.0.3579.1',
1263 '72.0.3579.0',
1264 '71.0.3578.5',
1265 '70.0.3538.64',
1266 '71.0.3578.4',
1267 '71.0.3578.3',
1268 '71.0.3578.2',
1269 '71.0.3578.1',
1270 '71.0.3578.0',
1271 '70.0.3538.63',
1272 '69.0.3497.128',
1273 '70.0.3538.62',
1274 '70.0.3538.61',
1275 '70.0.3538.60',
1276 '70.0.3538.59',
1277 '71.0.3577.1',
1278 '71.0.3577.0',
1279 '70.0.3538.58',
1280 '69.0.3497.127',
1281 '71.0.3576.2',
1282 '71.0.3576.1',
1283 '71.0.3576.0',
1284 '70.0.3538.57',
1285 '70.0.3538.56',
1286 '71.0.3575.2',
1287 '70.0.3538.55',
1288 '69.0.3497.126',
1289 '70.0.3538.54',
1290 '71.0.3575.1',
1291 '71.0.3575.0',
1292 '71.0.3574.1',
1293 '71.0.3574.0',
1294 '70.0.3538.53',
1295 '69.0.3497.125',
1296 '70.0.3538.52',
1297 '71.0.3573.1',
1298 '71.0.3573.0',
1299 '70.0.3538.51',
1300 '69.0.3497.124',
1301 '71.0.3572.1',
1302 '71.0.3572.0',
1303 '70.0.3538.50',
1304 '69.0.3497.123',
1305 '71.0.3571.2',
1306 '70.0.3538.49',
1307 '69.0.3497.122',
1308 '71.0.3571.1',
1309 '71.0.3571.0',
1310 '70.0.3538.48',
1311 '69.0.3497.121',
1312 '71.0.3570.1',
1313 '71.0.3570.0',
1314 '70.0.3538.47',
1315 '69.0.3497.120',
1316 '71.0.3568.2',
1317 '71.0.3569.1',
1318 '71.0.3569.0',
1319 '70.0.3538.46',
1320 '69.0.3497.119',
1321 '70.0.3538.45',
1322 '71.0.3568.1',
1323 '71.0.3568.0',
1324 '70.0.3538.44',
1325 '69.0.3497.118',
1326 '70.0.3538.43',
1327 '70.0.3538.42',
1328 '71.0.3567.1',
1329 '71.0.3567.0',
1330 '70.0.3538.41',
1331 '69.0.3497.117',
1332 '71.0.3566.1',
1333 '71.0.3566.0',
1334 '70.0.3538.40',
1335 '69.0.3497.116',
1336 '71.0.3565.1',
1337 '71.0.3565.0',
1338 '70.0.3538.39',
1339 '69.0.3497.115',
1340 '71.0.3564.1',
1341 '71.0.3564.0',
1342 '70.0.3538.38',
1343 '69.0.3497.114',
1344 '71.0.3563.0',
1345 '71.0.3562.2',
1346 '70.0.3538.37',
1347 '69.0.3497.113',
1348 '70.0.3538.36',
1349 '70.0.3538.35',
1350 '71.0.3562.1',
1351 '71.0.3562.0',
1352 '70.0.3538.34',
1353 '69.0.3497.112',
1354 '70.0.3538.33',
1355 '71.0.3561.1',
1356 '71.0.3561.0',
1357 '70.0.3538.32',
1358 '69.0.3497.111',
1359 '71.0.3559.6',
1360 '71.0.3560.1',
1361 '71.0.3560.0',
1362 '71.0.3559.5',
1363 '71.0.3559.4',
1364 '70.0.3538.31',
1365 '69.0.3497.110',
1366 '71.0.3559.3',
1367 '70.0.3538.30',
1368 '69.0.3497.109',
1369 '71.0.3559.2',
1370 '71.0.3559.1',
1371 '71.0.3559.0',
1372 '70.0.3538.29',
1373 '69.0.3497.108',
1374 '71.0.3558.2',
1375 '71.0.3558.1',
1376 '71.0.3558.0',
1377 '70.0.3538.28',
1378 '69.0.3497.107',
1379 '71.0.3557.2',
1380 '71.0.3557.1',
1381 '71.0.3557.0',
1382 '70.0.3538.27',
1383 '69.0.3497.106',
1384 '71.0.3554.4',
1385 '70.0.3538.26',
1386 '71.0.3556.1',
1387 '71.0.3556.0',
1388 '70.0.3538.25',
1389 '71.0.3554.3',
1390 '69.0.3497.105',
1391 '71.0.3554.2',
1392 '70.0.3538.24',
1393 '69.0.3497.104',
1394 '71.0.3555.2',
1395 '70.0.3538.23',
1396 '71.0.3555.1',
1397 '71.0.3555.0',
1398 '70.0.3538.22',
1399 '69.0.3497.103',
1400 '71.0.3554.1',
1401 '71.0.3554.0',
1402 '70.0.3538.21',
1403 '69.0.3497.102',
1404 '71.0.3553.3',
1405 '70.0.3538.20',
1406 '69.0.3497.101',
1407 '71.0.3553.2',
1408 '69.0.3497.100',
1409 '71.0.3553.1',
1410 '71.0.3553.0',
1411 '70.0.3538.19',
1412 '69.0.3497.99',
1413 '69.0.3497.98',
1414 '69.0.3497.97',
1415 '71.0.3552.6',
1416 '71.0.3552.5',
1417 '71.0.3552.4',
1418 '71.0.3552.3',
1419 '71.0.3552.2',
1420 '71.0.3552.1',
1421 '71.0.3552.0',
1422 '70.0.3538.18',
1423 '69.0.3497.96',
1424 '71.0.3551.3',
1425 '71.0.3551.2',
1426 '71.0.3551.1',
1427 '71.0.3551.0',
1428 '70.0.3538.17',
1429 '69.0.3497.95',
1430 '71.0.3550.3',
1431 '71.0.3550.2',
1432 '71.0.3550.1',
1433 '71.0.3550.0',
1434 '70.0.3538.16',
1435 '69.0.3497.94',
1436 '71.0.3549.1',
1437 '71.0.3549.0',
1438 '70.0.3538.15',
1439 '69.0.3497.93',
1440 '69.0.3497.92',
1441 '71.0.3548.1',
1442 '71.0.3548.0',
1443 '70.0.3538.14',
1444 '69.0.3497.91',
1445 '71.0.3547.1',
1446 '71.0.3547.0',
1447 '70.0.3538.13',
1448 '69.0.3497.90',
1449 '71.0.3546.2',
1450 '69.0.3497.89',
1451 '71.0.3546.1',
1452 '71.0.3546.0',
1453 '70.0.3538.12',
1454 '69.0.3497.88',
1455 '71.0.3545.4',
1456 '71.0.3545.3',
1457 '71.0.3545.2',
1458 '71.0.3545.1',
1459 '71.0.3545.0',
1460 '70.0.3538.11',
1461 '69.0.3497.87',
1462 '71.0.3544.5',
1463 '71.0.3544.4',
1464 '71.0.3544.3',
1465 '71.0.3544.2',
1466 '71.0.3544.1',
1467 '71.0.3544.0',
1468 '69.0.3497.86',
1469 '70.0.3538.10',
1470 '69.0.3497.85',
1471 '70.0.3538.9',
1472 '69.0.3497.84',
1473 '71.0.3543.4',
1474 '70.0.3538.8',
1475 '71.0.3543.3',
1476 '71.0.3543.2',
1477 '71.0.3543.1',
1478 '71.0.3543.0',
1479 '70.0.3538.7',
1480 '69.0.3497.83',
1481 '71.0.3542.2',
1482 '71.0.3542.1',
1483 '71.0.3542.0',
1484 '70.0.3538.6',
1485 '69.0.3497.82',
1486 '69.0.3497.81',
1487 '71.0.3541.1',
1488 '71.0.3541.0',
1489 '70.0.3538.5',
1490 '69.0.3497.80',
1491 '71.0.3540.1',
1492 '71.0.3540.0',
1493 '70.0.3538.4',
1494 '69.0.3497.79',
1495 '70.0.3538.3',
1496 '71.0.3539.1',
1497 '71.0.3539.0',
1498 '69.0.3497.78',
1499 '68.0.3440.134',
1500 '69.0.3497.77',
1501 '70.0.3538.2',
1502 '70.0.3538.1',
1503 '70.0.3538.0',
1504 '69.0.3497.76',
1505 '68.0.3440.133',
1506 '69.0.3497.75',
1507 '70.0.3537.2',
1508 '70.0.3537.1',
1509 '70.0.3537.0',
1510 '69.0.3497.74',
1511 '68.0.3440.132',
1512 '70.0.3536.0',
1513 '70.0.3535.5',
1514 '70.0.3535.4',
1515 '70.0.3535.3',
1516 '69.0.3497.73',
1517 '68.0.3440.131',
1518 '70.0.3532.8',
1519 '70.0.3532.7',
1520 '69.0.3497.72',
1521 '69.0.3497.71',
1522 '70.0.3535.2',
1523 '70.0.3535.1',
1524 '70.0.3535.0',
1525 '69.0.3497.70',
1526 '68.0.3440.130',
1527 '69.0.3497.69',
1528 '68.0.3440.129',
1529 '70.0.3534.4',
1530 '70.0.3534.3',
1531 '70.0.3534.2',
1532 '70.0.3534.1',
1533 '70.0.3534.0',
1534 '69.0.3497.68',
1535 '68.0.3440.128',
1536 '70.0.3533.2',
1537 '70.0.3533.1',
1538 '70.0.3533.0',
1539 '69.0.3497.67',
1540 '68.0.3440.127',
1541 '70.0.3532.6',
1542 '70.0.3532.5',
1543 '70.0.3532.4',
1544 '69.0.3497.66',
1545 '68.0.3440.126',
1546 '70.0.3532.3',
1547 '70.0.3532.2',
1548 '70.0.3532.1',
1549 '69.0.3497.60',
1550 '69.0.3497.65',
1551 '69.0.3497.64',
1552 '70.0.3532.0',
1553 '70.0.3531.0',
1554 '70.0.3530.4',
1555 '70.0.3530.3',
1556 '70.0.3530.2',
1557 '69.0.3497.58',
1558 '68.0.3440.125',
1559 '69.0.3497.57',
1560 '69.0.3497.56',
1561 '69.0.3497.55',
1562 '69.0.3497.54',
1563 '70.0.3530.1',
1564 '70.0.3530.0',
1565 '69.0.3497.53',
1566 '68.0.3440.124',
1567 '69.0.3497.52',
1568 '70.0.3529.3',
1569 '70.0.3529.2',
1570 '70.0.3529.1',
1571 '70.0.3529.0',
1572 '69.0.3497.51',
1573 '70.0.3528.4',
1574 '68.0.3440.123',
1575 '70.0.3528.3',
1576 '70.0.3528.2',
1577 '70.0.3528.1',
1578 '70.0.3528.0',
1579 '69.0.3497.50',
1580 '68.0.3440.122',
1581 '70.0.3527.1',
1582 '70.0.3527.0',
1583 '69.0.3497.49',
1584 '68.0.3440.121',
1585 '70.0.3526.1',
1586 '70.0.3526.0',
1587 '68.0.3440.120',
1588 '69.0.3497.48',
1589 '69.0.3497.47',
1590 '68.0.3440.119',
1591 '68.0.3440.118',
1592 '70.0.3525.5',
1593 '70.0.3525.4',
1594 '70.0.3525.3',
1595 '68.0.3440.117',
1596 '69.0.3497.46',
1597 '70.0.3525.2',
1598 '70.0.3525.1',
1599 '70.0.3525.0',
1600 '69.0.3497.45',
1601 '68.0.3440.116',
1602 '70.0.3524.4',
1603 '70.0.3524.3',
1604 '69.0.3497.44',
1605 '70.0.3524.2',
1606 '70.0.3524.1',
1607 '70.0.3524.0',
1608 '70.0.3523.2',
1609 '69.0.3497.43',
1610 '68.0.3440.115',
1611 '70.0.3505.9',
1612 '69.0.3497.42',
1613 '70.0.3505.8',
1614 '70.0.3523.1',
1615 '70.0.3523.0',
1616 '69.0.3497.41',
1617 '68.0.3440.114',
1618 '70.0.3505.7',
1619 '69.0.3497.40',
1620 '70.0.3522.1',
1621 '70.0.3522.0',
1622 '70.0.3521.2',
1623 '69.0.3497.39',
1624 '68.0.3440.113',
1625 '70.0.3505.6',
1626 '70.0.3521.1',
1627 '70.0.3521.0',
1628 '69.0.3497.38',
1629 '68.0.3440.112',
1630 '70.0.3520.1',
1631 '70.0.3520.0',
1632 '69.0.3497.37',
1633 '68.0.3440.111',
1634 '70.0.3519.3',
1635 '70.0.3519.2',
1636 '70.0.3519.1',
1637 '70.0.3519.0',
1638 '69.0.3497.36',
1639 '68.0.3440.110',
1640 '70.0.3518.1',
1641 '70.0.3518.0',
1642 '69.0.3497.35',
1643 '69.0.3497.34',
1644 '68.0.3440.109',
1645 '70.0.3517.1',
1646 '70.0.3517.0',
1647 '69.0.3497.33',
1648 '68.0.3440.108',
1649 '69.0.3497.32',
1650 '70.0.3516.3',
1651 '70.0.3516.2',
1652 '70.0.3516.1',
1653 '70.0.3516.0',
1654 '69.0.3497.31',
1655 '68.0.3440.107',
1656 '70.0.3515.4',
1657 '68.0.3440.106',
1658 '70.0.3515.3',
1659 '70.0.3515.2',
1660 '70.0.3515.1',
1661 '70.0.3515.0',
1662 '69.0.3497.30',
1663 '68.0.3440.105',
1664 '68.0.3440.104',
1665 '70.0.3514.2',
1666 '70.0.3514.1',
1667 '70.0.3514.0',
1668 '69.0.3497.29',
1669 '68.0.3440.103',
1670 '70.0.3513.1',
1671 '70.0.3513.0',
1672 '69.0.3497.28',
    )
    return _USER_AGENT_TPL % random.choice(_CHROME_VERSIONS)


std_headers = {
    'User-Agent': random_user_agent(),
    'Accept-Charset': 'ISO-8859-1,utf-8;q=0.7,*;q=0.7',
    'Accept': 'text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8',
    'Accept-Encoding': 'gzip, deflate',
    'Accept-Language': 'en-us,en;q=0.5',
}


USER_AGENTS = {
    'Safari': 'Mozilla/5.0 (X11; Linux x86_64; rv:10.0) AppleWebKit/533.20.25 (KHTML, like Gecko) Version/5.0.4 Safari/533.20.27',
}


NO_DEFAULT = object()

ENGLISH_MONTH_NAMES = [
    'January', 'February', 'March', 'April', 'May', 'June',
    'July', 'August', 'September', 'October', 'November', 'December']

MONTH_NAMES = {
    'en': ENGLISH_MONTH_NAMES,
    'fr': [
        'janvier', 'février', 'mars', 'avril', 'mai', 'juin',
        'juillet', 'août', 'septembre', 'octobre', 'novembre', 'décembre'],
}

KNOWN_EXTENSIONS = (
    'mp4', 'm4a', 'm4p', 'm4b', 'm4r', 'm4v', 'aac',
    'flv', 'f4v', 'f4a', 'f4b',
    'webm', 'ogg', 'ogv', 'oga', 'ogx', 'spx', 'opus',
    'mkv', 'mka', 'mk3d',
    'avi', 'divx',
    'mov',
    'asf', 'wmv', 'wma',
    '3gp', '3g2',
    'mp3',
    'flac',
    'ape',
    'wav',
    'f4f', 'f4m', 'm3u8', 'smil')

# needed for sanitizing filenames in restricted mode
ACCENT_CHARS = dict(zip('ÂÃÄÀÁÅÆÇÈÉÊËÌÍÎÏÐÑÒÓÔÕÖŐØŒÙÚÛÜŰÝÞßàáâãäåæçèéêëìíîïðñòóôõöőøœùúûüűýþÿ',
                        itertools.chain('AAAAAA', ['AE'], 'CEEEEIIIIDNOOOOOOO', ['OE'], 'UUUUUY', ['TH', 'ss'],
                                        'aaaaaa', ['ae'], 'ceeeeiiiionooooooo', ['oe'], 'uuuuuy', ['th'], 'y')))

DATE_FORMATS = (
    '%d %B %Y',
    '%d %b %Y',
    '%B %d %Y',
    '%B %dst %Y',
    '%B %dnd %Y',
    '%B %drd %Y',
    '%B %dth %Y',
    '%b %d %Y',
    '%b %dst %Y',
    '%b %dnd %Y',
    '%b %drd %Y',
    '%b %dth %Y',
    '%b %dst %Y %I:%M',
    '%b %dnd %Y %I:%M',
    '%b %drd %Y %I:%M',
    '%b %dth %Y %I:%M',
    '%Y %m %d',
    '%Y-%m-%d',
    '%Y/%m/%d',
    '%Y/%m/%d %H:%M',
    '%Y/%m/%d %H:%M:%S',
    '%Y-%m-%d %H:%M',
    '%Y-%m-%d %H:%M:%S',
    '%Y-%m-%d %H:%M:%S.%f',
    '%Y-%m-%d %H:%M:%S:%f',
    '%d.%m.%Y %H:%M',
    '%d.%m.%Y %H.%M',
    '%Y-%m-%dT%H:%M:%SZ',
    '%Y-%m-%dT%H:%M:%S.%fZ',
    '%Y-%m-%dT%H:%M:%S.%f0Z',
    '%Y-%m-%dT%H:%M:%S',
    '%Y-%m-%dT%H:%M:%S.%f',
    '%Y-%m-%dT%H:%M',
    '%b %d %Y at %H:%M',
    '%b %d %Y at %H:%M:%S',
    '%B %d %Y at %H:%M',
    '%B %d %Y at %H:%M:%S',
)

DATE_FORMATS_DAY_FIRST = list(DATE_FORMATS)
DATE_FORMATS_DAY_FIRST.extend([
    '%d-%m-%Y',
    '%d.%m.%Y',
    '%d.%m.%y',
    '%d/%m/%Y',
    '%d/%m/%y',
    '%d/%m/%Y %H:%M:%S',
])

DATE_FORMATS_MONTH_FIRST = list(DATE_FORMATS)
DATE_FORMATS_MONTH_FIRST.extend([
    '%m-%d-%Y',
    '%m.%d.%Y',
    '%m/%d/%Y',
    '%m/%d/%y',
    '%m/%d/%Y %H:%M:%S',
])

PACKED_CODES_RE = r"}\('(.+)',(\d+),(\d+),'([^']+)'\.split\('\|'\)"
JSON_LD_RE = r'(?is)<script[^>]+type=(["\']?)application/ld\+json\1[^>]*>(?P<json_ld>.+?)</script>'


def preferredencoding():
    """Get preferred encoding.

    Returns the best encoding scheme for the system, based on
    locale.getpreferredencoding() and some further tweaks.
    """
    try:
        pref = locale.getpreferredencoding()
        'TEST'.encode(pref)
    except Exception:
        pref = 'UTF-8'

    return pref


def write_json_file(obj, fn):
    """ Encode obj as JSON and write it to fn, atomically if possible """

    fn = encodeFilename(fn)
    if sys.version_info < (3, 0) and sys.platform != 'win32':
        encoding = get_filesystem_encoding()
        # os.path.basename returns a bytes object, but NamedTemporaryFile
        # will fail if the filename contains non ascii characters unless we
        # use a unicode object
        path_basename = lambda f: os.path.basename(fn).decode(encoding)
        # the same for os.path.dirname
        path_dirname = lambda f: os.path.dirname(fn).decode(encoding)
    else:
        path_basename = os.path.basename
        path_dirname = os.path.dirname

    args = {
        'suffix': '.tmp',
        'prefix': path_basename(fn) + '.',
        'dir': path_dirname(fn),
        'delete': False,
    }

    # In Python 2.x, json.dump expects a bytestream.
    # In Python 3.x, it writes to a character stream
    if sys.version_info < (3, 0):
        args['mode'] = 'wb'
    else:
        args.update({
            'mode': 'w',
            'encoding': 'utf-8',
        })

    tf = tempfile.NamedTemporaryFile(**compat_kwargs(args))

    try:
        with tf:
            json.dump(obj, tf, default=repr)
        if sys.platform == 'win32':
            # Need to remove existing file on Windows, else os.rename raises
            # WindowsError or FileExistsError.
            try:
                os.unlink(fn)
            except OSError:
                pass
        try:
            mask = os.umask(0)
            os.umask(mask)
            os.chmod(tf.name, 0o666 & ~mask)
        except OSError:
            pass
        os.rename(tf.name, fn)
    except Exception:
        try:
            os.remove(tf.name)
        except OSError:
            pass
        raise


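# Illustrative usage (sketch, not from the original source): write_json_file({'id': '42'}, 'info.json')
# first writes to a temporary 'info.json.<random>.tmp' file in the same directory and then
# renames it over 'info.json', so a concurrent reader never sees a partially written file.

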
if sys.version_info >= (2, 7):
    def find_xpath_attr(node, xpath, key, val=None):
        """ Find the xpath xpath[@key=val] """
        assert re.match(r'^[a-zA-Z_-]+$', key)
        expr = xpath + ('[@%s]' % key if val is None else "[@%s='%s']" % (key, val))
        return node.find(expr)
else:
    def find_xpath_attr(node, xpath, key, val=None):
        for f in node.findall(compat_xpath(xpath)):
            if key not in f.attrib:
                continue
            if val is None or f.attrib.get(key) == val:
                return f
        return None

# On python2.6 the xml.etree.ElementTree.Element methods don't support
# the namespace parameter


def xpath_with_ns(path, ns_map):
    components = [c.split(':') for c in path.split('/')]
    replaced = []
    for c in components:
        if len(c) == 1:
            replaced.append(c[0])
        else:
            ns, tag = c
            replaced.append('{%s}%s' % (ns_map[ns], tag))
    return '/'.join(replaced)


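# Illustrative example (sketch, not from the original source):
#   >>> xpath_with_ns('media:song/media:author', {'media': 'http://example.com/ns'})
#   '{http://example.com/ns}song/{http://example.com/ns}author'

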
def xpath_element(node, xpath, name=None, fatal=False, default=NO_DEFAULT):
    def _find_xpath(xpath):
        return node.find(compat_xpath(xpath))

    if isinstance(xpath, (str, compat_str)):
        n = _find_xpath(xpath)
    else:
        for xp in xpath:
            n = _find_xpath(xp)
            if n is not None:
                break

    if n is None:
        if default is not NO_DEFAULT:
            return default
        elif fatal:
            name = xpath if name is None else name
            raise ExtractorError('Could not find XML element %s' % name)
        else:
            return None
    return n


def xpath_text(node, xpath, name=None, fatal=False, default=NO_DEFAULT):
    n = xpath_element(node, xpath, name, fatal=fatal, default=default)
    if n is None or n == default:
        return n
    if n.text is None:
        if default is not NO_DEFAULT:
            return default
        elif fatal:
            name = xpath if name is None else name
            raise ExtractorError('Could not find XML element\'s text %s' % name)
        else:
            return None
    return n.text


def xpath_attr(node, xpath, key, name=None, fatal=False, default=NO_DEFAULT):
    n = find_xpath_attr(node, xpath, key)
    if n is None:
        if default is not NO_DEFAULT:
            return default
        elif fatal:
            name = '%s[@%s]' % (xpath, key) if name is None else name
            raise ExtractorError('Could not find XML attribute %s' % name)
        else:
            return None
    return n.attrib[key]


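# Illustrative examples (sketch, not from the original source):
#   >>> doc = compat_etree_fromstring('<root><media id="42"><title>Foo</title></media></root>')
#   >>> xpath_text(doc, './/title')
#   'Foo'
#   >>> xpath_attr(doc, './/media', 'id')
#   '42'

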
def get_element_by_id(id, html):
    """Return the content of the tag with the specified ID in the passed HTML document"""
    return get_element_by_attribute('id', id, html)


def get_element_by_class(class_name, html):
    """Return the content of the first tag with the specified class in the passed HTML document"""
    retval = get_elements_by_class(class_name, html)
    return retval[0] if retval else None


def get_element_by_attribute(attribute, value, html, escape_value=True):
    retval = get_elements_by_attribute(attribute, value, html, escape_value)
    return retval[0] if retval else None


def get_elements_by_class(class_name, html):
    """Return the content of all tags with the specified class in the passed HTML document as a list"""
    return get_elements_by_attribute(
        'class', r'[^\'"]*\b%s\b[^\'"]*' % re.escape(class_name),
        html, escape_value=False)


def get_elements_by_attribute(attribute, value, html, escape_value=True):
    """Return the content of the tag with the specified attribute in the passed HTML document"""

    value = re.escape(value) if escape_value else value

    retlist = []
    for m in re.finditer(r'''(?xs)
        <([a-zA-Z0-9:._-]+)
         (?:\s+[a-zA-Z0-9:._-]+(?:=[a-zA-Z0-9:._-]*|="[^"]*"|='[^']*'|))*?
         \s+%s=['"]?%s['"]?
        (?:\s+[a-zA-Z0-9:._-]+(?:=[a-zA-Z0-9:._-]*|="[^"]*"|='[^']*'|))*?
        \s*>
        (?P<content>.*?)
        </\1>
    ''' % (re.escape(attribute), value), html):
        res = m.group('content')

        if res.startswith('"') or res.startswith("'"):
            res = res[1:-1]

        retlist.append(unescapeHTML(res))

    return retlist


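# Illustrative examples (sketch, not from the original source):
#   >>> get_element_by_id('player', '<div id="player">Hello</div>')
#   'Hello'
#   >>> get_element_by_class('title', '<span class="video title">Foo</span>')
#   'Foo'

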
class HTMLAttributeParser(compat_HTMLParser):
    """Trivial HTML parser to gather the attributes for a single element"""

    def __init__(self):
        self.attrs = {}
        compat_HTMLParser.__init__(self)

    def handle_starttag(self, tag, attrs):
        self.attrs = dict(attrs)


def extract_attributes(html_element):
    """Given a string for an HTML element such as
    <el
         a="foo" B="bar" c="&98;az" d=boz
         empty= noval entity="&amp;"
         sq='"' dq="'"
    >
    Decode and return a dictionary of attributes.
    {
        'a': 'foo', 'b': 'bar', c: 'baz', d: 'boz',
        'empty': '', 'noval': None, 'entity': '&',
        'sq': '"', 'dq': '\''
    }.
    NB HTMLParser is stricter in Python 2.6 & 3.2 than in later versions,
    but the cases in the unit test will work for all of 2.6, 2.7, 3.2-3.5.
    """
    parser = HTMLAttributeParser()
    try:
        parser.feed(html_element)
        parser.close()
    # Older Python may throw HTMLParseError in case of malformed HTML
    except compat_HTMLParseError:
        pass
    return parser.attrs


def clean_html(html):
    """Clean an HTML snippet into a readable string"""

    if html is None:  # Convenience for sanitizing descriptions etc.
        return html

    # Newline vs <br />
    html = html.replace('\n', ' ')
    html = re.sub(r'(?u)\s*<\s*br\s*/?\s*>\s*', '\n', html)
    html = re.sub(r'(?u)<\s*/\s*p\s*>\s*<\s*p[^>]*>', '\n', html)
    # Strip html tags
    html = re.sub('<.*?>', '', html)
    # Replace html entities
    html = unescapeHTML(html)
    return html.strip()


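# Illustrative example (sketch, not from the original source):
#   >>> clean_html('Some <b>bold</b> text<br/>on two lines')
#   'Some bold text\non two lines'

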
def sanitize_open(filename, open_mode):
    """Try to open the given filename, and slightly tweak it if this fails.

    Attempts to open the given filename. If this fails, it tries to change
    the filename slightly, step by step, until it's either able to open it
    or it fails and raises a final exception, like the standard open()
    function.

    It returns the tuple (stream, definitive_file_name).
    """
    try:
        if filename == '-':
            if sys.platform == 'win32':
                import msvcrt
                msvcrt.setmode(sys.stdout.fileno(), os.O_BINARY)
            return (sys.stdout.buffer if hasattr(sys.stdout, 'buffer') else sys.stdout, filename)
        stream = open(encodeFilename(filename), open_mode)
        return (stream, filename)
    except (IOError, OSError) as err:
        if err.errno in (errno.EACCES,):
            raise

        # In case of error, try to remove win32 forbidden chars
        alt_filename = sanitize_path(filename)
        if alt_filename == filename:
            raise
        else:
            # An exception here should be caught in the caller
            stream = open(encodeFilename(alt_filename), open_mode)
            return (stream, alt_filename)


def timeconvert(timestr):
    """Convert RFC 2822 defined time string into system timestamp"""
    timestamp = None
    timetuple = email.utils.parsedate_tz(timestr)
    if timetuple is not None:
        timestamp = email.utils.mktime_tz(timetuple)
    return timestamp


def sanitize_filename(s, restricted=False, is_id=False):
    """Sanitizes a string so it could be used as part of a filename.
    If restricted is set, use a stricter subset of allowed characters.
    Set is_id if this is not an arbitrary string, but an ID that should be kept
    if possible.
    """
    def replace_insane(char):
        if restricted and char in ACCENT_CHARS:
            return ACCENT_CHARS[char]
        if char == '?' or ord(char) < 32 or ord(char) == 127:
            return ''
        elif char == '"':
            return '' if restricted else '\''
        elif char == ':':
            return '_-' if restricted else ' -'
        elif char in '\\/|*<>':
            return '_'
        if restricted and (char in '!&\'()[]{}$;`^,#' or char.isspace()):
            return '_'
        if restricted and ord(char) > 127:
            return '_'
        return char

    if s == '':
        return ''
    # Handle timestamps
    s = re.sub(r'[0-9]+(?::[0-9]+)+', lambda m: m.group(0).replace(':', '_'), s)
    result = ''.join(map(replace_insane, s))
    if not is_id:
        while '__' in result:
            result = result.replace('__', '_')
        result = result.strip('_')
        # Common case of "Foreign band name - English song title"
        if restricted and result.startswith('-_'):
            result = result[2:]
        if result.startswith('-'):
            result = '_' + result[len('-'):]
        result = result.lstrip('.')
        if not result:
            result = '_'
    return result


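# Illustrative examples (sketch, not from the original source):
#   >>> sanitize_filename('A/B: C?')
#   'A_B - C'
#   >>> sanitize_filename('A/B: C?', restricted=True)
#   'A_B_-_C'

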
def sanitize_path(s, force=False):
    """Sanitizes and normalizes path on Windows"""
    if sys.platform == 'win32':
        force = False
        drive_or_unc, _ = os.path.splitdrive(s)
        if sys.version_info < (2, 7) and not drive_or_unc:
            drive_or_unc, _ = os.path.splitunc(s)
    elif force:
        drive_or_unc = ''
    else:
        return s

    norm_path = os.path.normpath(remove_start(s, drive_or_unc)).split(os.path.sep)
    if drive_or_unc:
        norm_path.pop(0)
    sanitized_path = [
        path_part if path_part in ['.', '..'] else re.sub(r'(?:[/<>:"\|\\?\*]|[\s.]$)', '#', path_part)
        for path_part in norm_path]
    if drive_or_unc:
        sanitized_path.insert(0, drive_or_unc + os.path.sep)
    elif force and s[0] == os.path.sep:
        sanitized_path.insert(0, os.path.sep)
    return os.path.join(*sanitized_path)


def sanitize_url(url):
    # Prepend protocol-less URLs with `http:` scheme in order to mitigate
    # the number of unwanted failures due to missing protocol
    if url.startswith('//'):
        return 'http:%s' % url
    # Fix some common typos seen so far
    COMMON_TYPOS = (
        # https://github.com/ytdl-org/youtube-dl/issues/15649
        (r'^httpss://', r'https://'),
        # https://bx1.be/lives/direct-tv/
        (r'^rmtp([es]?)://', r'rtmp\1://'),
    )
    for mistake, fixup in COMMON_TYPOS:
        if re.match(mistake, url):
            return re.sub(mistake, fixup, url)
    return url


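# Illustrative examples (sketch, not from the original source):
#   >>> sanitize_url('//cdn.example.com/video.mp4')
#   'http://cdn.example.com/video.mp4'
#   >>> sanitize_url('httpss://example.com/stream')
#   'https://example.com/stream'

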
def extract_basic_auth(url):
    parts = compat_urlparse.urlsplit(url)
    if parts.username is None:
        return url, None
    url = compat_urlparse.urlunsplit(parts._replace(netloc=(
        parts.hostname if parts.port is None
        else '%s:%d' % (parts.hostname, parts.port))))
    auth_payload = base64.b64encode(
        ('%s:%s' % (parts.username, parts.password or '')).encode('utf-8'))
    return url, 'Basic ' + auth_payload.decode('utf-8')


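# Illustrative example (sketch, not from the original source):
#   >>> extract_basic_auth('http://user:pass@example.com/feed')
#   ('http://example.com/feed', 'Basic dXNlcjpwYXNz')
# URLs without embedded credentials are returned unchanged with None as the header value.

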
def sanitized_Request(url, *args, **kwargs):
    url, auth_header = extract_basic_auth(escape_url(sanitize_url(url)))
    if auth_header is not None:
        headers = args[1] if len(args) >= 2 else kwargs.setdefault('headers', {})
        headers['Authorization'] = auth_header
    return compat_urllib_request.Request(url, *args, **kwargs)


def expand_path(s):
    """Expand shell variables and ~"""
    return os.path.expandvars(compat_expanduser(s))


def orderedSet(iterable):
    """ Remove all duplicates from the input iterable """
    res = []
    for el in iterable:
        if el not in res:
            res.append(el)
    return res


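# Illustrative example (sketch, not from the original source):
#   >>> orderedSet([1, 2, 1, 3, 2])
#   [1, 2, 3]

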
def _htmlentity_transform(entity_with_semicolon):
    """Transforms an HTML entity to a character."""
    entity = entity_with_semicolon[:-1]

    # Known non-numeric HTML entity
    if entity in compat_html_entities.name2codepoint:
        return compat_chr(compat_html_entities.name2codepoint[entity])

    # TODO: HTML5 allows entities without a semicolon. For example,
    # '&Eacuteric' should be decoded as 'Éric'.
    if entity_with_semicolon in compat_html_entities_html5:
        return compat_html_entities_html5[entity_with_semicolon]

    mobj = re.match(r'#(x[0-9a-fA-F]+|[0-9]+)', entity)
    if mobj is not None:
        numstr = mobj.group(1)
        if numstr.startswith('x'):
            base = 16
            numstr = '0%s' % numstr
        else:
            base = 10
        # See https://github.com/ytdl-org/youtube-dl/issues/7518
        try:
            return compat_chr(int(numstr, base))
        except ValueError:
            pass

    # Unknown entity in name, return its literal representation
    return '&%s;' % entity


def unescapeHTML(s):
    if s is None:
        return None
    assert type(s) == compat_str

    return re.sub(
        r'&([^&;]+;)', lambda m: _htmlentity_transform(m.group(1)), s)


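# Illustrative example (sketch, not from the original source):
#   >>> unescapeHTML('Ben &amp; Jerry&#39;s')
#   "Ben & Jerry's"

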
f5b1bca9 2247def process_communicate_or_kill(p, *args, **kwargs):
2248 try:
2249 return p.communicate(*args, **kwargs)
2250 except BaseException: # Including KeyboardInterrupt
2251 p.kill()
2252 p.wait()
2253 raise
2254
2255
aa49acd1
S
2256def get_subprocess_encoding():
2257 if sys.platform == 'win32' and sys.getwindowsversion()[0] >= 5:
2258 # For subprocess calls, encode with locale encoding
2259 # Refer to http://stackoverflow.com/a/9951851/35070
2260 encoding = preferredencoding()
2261 else:
2262 encoding = sys.getfilesystemencoding()
2263 if encoding is None:
2264 encoding = 'utf-8'
2265 return encoding
2266
2267
8bf48f23 2268def encodeFilename(s, for_subprocess=False):
59ae15a5
PH
2269 """
2270 @param s The name of the file
2271 """
d77c3dfd 2272
8bf48f23 2273 assert type(s) == compat_str
d77c3dfd 2274
59ae15a5
PH
2275 # Python 3 has a Unicode API
2276 if sys.version_info >= (3, 0):
2277 return s
0f00efed 2278
aa49acd1
S
2279 # Pass '' directly to use Unicode APIs on Windows 2000 and up
2280 # (Detecting Windows NT 4 is tricky because 'major >= 4' would
2281 # match Windows 9x series as well. Besides, NT 4 is obsolete.)
2282 if not for_subprocess and sys.platform == 'win32' and sys.getwindowsversion()[0] >= 5:
2283 return s
2284
8ee239e9
YCH
2285 # Jython assumes filenames are Unicode strings though reported as Python 2.x compatible
2286 if sys.platform.startswith('java'):
2287 return s
2288
aa49acd1
S
2289 return s.encode(get_subprocess_encoding(), 'ignore')
2290
2291
2292def decodeFilename(b, for_subprocess=False):
2293
2294 if sys.version_info >= (3, 0):
2295 return b
2296
2297 if not isinstance(b, bytes):
2298 return b
2299
2300 return b.decode(get_subprocess_encoding(), 'ignore')
8bf48f23 2301
f07b74fc
PH
2302
2303def encodeArgument(s):
2304 if not isinstance(s, compat_str):
2305 # Legacy code that uses byte strings
2306 # Uncomment the following line after fixing all post processors
7af808a5 2307 # assert False, 'Internal error: %r should be of type %r, is %r' % (s, compat_str, type(s))
f07b74fc
PH
2308 s = s.decode('ascii')
2309 return encodeFilename(s, True)
2310
2311
aa49acd1
S
2312def decodeArgument(b):
2313 return decodeFilename(b, True)
2314
2315
8271226a
PH
2316def decodeOption(optval):
2317 if optval is None:
2318 return optval
2319 if isinstance(optval, bytes):
2320 optval = optval.decode(preferredencoding())
2321
2322 assert isinstance(optval, compat_str)
2323 return optval
1c256f70 2324
5f6a1245 2325
dbbbe555 2326def formatSeconds(secs, delim=':'):
4539dd30 2327 if secs > 3600:
dbbbe555 2328 return '%d%s%02d%s%02d' % (secs // 3600, delim, (secs % 3600) // 60, delim, secs % 60)
4539dd30 2329 elif secs > 60:
dbbbe555 2330 return '%d%s%02d' % (secs // 60, delim, secs % 60)
4539dd30
PH
2331 else:
2332 return '%d' % secs
2333
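A short sketch of formatSeconds() with hypothetical values, assuming the yt_dlp.utils import path; note that the comparisons are strict, so exactly 3600 seconds falls into the minutes branch:

    from yt_dlp.utils import formatSeconds

    formatSeconds(45)               # '45'
    formatSeconds(125)              # '2:05'
    formatSeconds(3725)             # '1:02:05'
    formatSeconds(3725, delim='_')  # '1_02_05'
    formatSeconds(3600)             # '60:00' (strict '>' comparison)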
a0ddb8a2 2334
be4a824d
PH
2335def make_HTTPS_handler(params, **kwargs):
2336 opts_no_check_certificate = params.get('nocheckcertificate', False)
0db261ba 2337 if hasattr(ssl, 'create_default_context'): # Python >= 3.4 or 2.7.9
be5f2c19 2338 context = ssl.create_default_context(ssl.Purpose.SERVER_AUTH)
0db261ba 2339 if opts_no_check_certificate:
be5f2c19 2340 context.check_hostname = False
0db261ba 2341 context.verify_mode = ssl.CERT_NONE
a2366922 2342 try:
be4a824d 2343 return YoutubeDLHTTPSHandler(params, context=context, **kwargs)
a2366922
PH
2344 except TypeError:
2345 # Python 2.7.8
2346 # (create_default_context present but HTTPSHandler has no context=)
2347 pass
2348
2349 if sys.version_info < (3, 2):
d7932313 2350 return YoutubeDLHTTPSHandler(params, **kwargs)
aa37e3d4 2351 else: # Python 3.2 or 3.3
d7932313 2352 context = ssl.SSLContext(ssl.PROTOCOL_TLSv1)
ea6d901e 2353 context.verify_mode = (ssl.CERT_NONE
dca08720 2354 if opts_no_check_certificate
ea6d901e 2355 else ssl.CERT_REQUIRED)
303b479e 2356 context.set_default_verify_paths()
be4a824d 2357 return YoutubeDLHTTPSHandler(params, context=context, **kwargs)
ea6d901e 2358
732ea2f0 2359
5873d4cc 2360def bug_reports_message(before=';'):
08f2a92c 2361 if ytdl_is_updateable():
7a5c1cfe 2362 update_cmd = 'type yt-dlp -U to update'
08f2a92c 2363 else:
7a5c1cfe 2364 update_cmd = 'see https://github.com/yt-dlp/yt-dlp on how to update'
5873d4cc 2365 msg = 'please report this issue on https://github.com/yt-dlp/yt-dlp .'
08f2a92c 2366 msg += ' Make sure you are using the latest version; %s.' % update_cmd
7a5c1cfe 2367 msg += ' Be sure to call yt-dlp with the --verbose flag and include its complete output.'
5873d4cc
F
2368
2369 before = before.rstrip()
2370 if not before or before.endswith(('.', '!', '?')):
2371 msg = msg[0].title() + msg[1:]
2372
2373 return (before + ' ' if before else '') + msg
08f2a92c
JMF
2374
2375
bf5b9d85
PM
2376class YoutubeDLError(Exception):
2377 """Base exception for YoutubeDL errors."""
2378 pass
2379
2380
3158150c 2381network_exceptions = [compat_urllib_error.URLError, compat_http_client.HTTPException, socket.error]
2382if hasattr(ssl, 'CertificateError'):
2383 network_exceptions.append(ssl.CertificateError)
2384network_exceptions = tuple(network_exceptions)
2385
2386
bf5b9d85 2387class ExtractorError(YoutubeDLError):
1c256f70 2388 """Error during info extraction."""
5f6a1245 2389
d11271dd 2390 def __init__(self, msg, tb=None, expected=False, cause=None, video_id=None):
9a82b238 2391 """ tb, if given, is the original traceback (so that it can be printed out).
7a5c1cfe 2392 If expected is set, this is a normal error message and most likely not a bug in yt-dlp.
9a82b238
PH
2393 """
2394
3158150c 2395 if sys.exc_info()[0] in network_exceptions:
9a82b238 2396 expected = True
d11271dd
PH
2397 if video_id is not None:
2398 msg = video_id + ': ' + msg
410f3e73 2399 if cause:
28e614de 2400 msg += ' (caused by %r)' % cause
9a82b238 2401 if not expected:
08f2a92c 2402 msg += bug_reports_message()
1c256f70 2403 super(ExtractorError, self).__init__(msg)
d5979c5d 2404
1c256f70 2405 self.traceback = tb
8cc83b8d 2406 self.exc_info = sys.exc_info() # preserve original exception
2eabb802 2407 self.cause = cause
d11271dd 2408 self.video_id = video_id
1c256f70 2409
01951dda
PH
2410 def format_traceback(self):
2411 if self.traceback is None:
2412 return None
28e614de 2413 return ''.join(traceback.format_tb(self.traceback))
01951dda 2414
1c256f70 2415
416c7fcb
PH
2416class UnsupportedError(ExtractorError):
2417 def __init__(self, url):
2418 super(UnsupportedError, self).__init__(
2419 'Unsupported URL: %s' % url, expected=True)
2420 self.url = url
2421
2422
55b3e45b
JMF
2423class RegexNotFoundError(ExtractorError):
2424 """Error when a regex didn't match"""
2425 pass
2426
2427
773f291d
S
2428class GeoRestrictedError(ExtractorError):
2429 """Geographic restriction Error exception.
2430
2431 This exception may be thrown when a video is not available from your
2432 geographic location due to geographic restrictions imposed by a website.
2433 """
b6e0c7d2 2434
773f291d
S
2435 def __init__(self, msg, countries=None):
2436 super(GeoRestrictedError, self).__init__(msg, expected=True)
2437 self.msg = msg
2438 self.countries = countries
2439
2440
bf5b9d85 2441class DownloadError(YoutubeDLError):
59ae15a5 2442 """Download Error exception.
d77c3dfd 2443
59ae15a5
PH
2444 This exception may be thrown by FileDownloader objects if they are not
2445 configured to continue on errors. They will contain the appropriate
2446 error message.
2447 """
5f6a1245 2448
8cc83b8d
FV
2449 def __init__(self, msg, exc_info=None):
2450 """ exc_info, if given, is the original exception that caused the trouble (as returned by sys.exc_info()). """
2451 super(DownloadError, self).__init__(msg)
2452 self.exc_info = exc_info
d77c3dfd
FV
2453
2454
498f5606 2455class EntryNotInPlaylist(YoutubeDLError):
2456 """Entry not in playlist exception.
2457
2458 This exception will be thrown by YoutubeDL when a requested entry
2459 is not found in the playlist info_dict
2460 """
2461 pass
2462
2463
bf5b9d85 2464class SameFileError(YoutubeDLError):
59ae15a5 2465 """Same File exception.
d77c3dfd 2466
59ae15a5
PH
2467 This exception will be thrown by FileDownloader objects if they detect
2468 multiple files would have to be downloaded to the same file on disk.
2469 """
2470 pass
d77c3dfd
FV
2471
2472
bf5b9d85 2473class PostProcessingError(YoutubeDLError):
59ae15a5 2474 """Post Processing exception.
d77c3dfd 2475
59ae15a5
PH
2476 This exception may be raised by PostProcessor's .run() method to
2477 indicate an error in the postprocessing task.
2478 """
5f6a1245 2479
7851b379 2480 def __init__(self, msg):
bf5b9d85 2481 super(PostProcessingError, self).__init__(msg)
7851b379 2482 self.msg = msg
d77c3dfd 2483
5f6a1245 2484
8b0d7497 2485class ExistingVideoReached(YoutubeDLError):
2486 """ --max-downloads limit has been reached. """
2487 pass
2488
2489
2490class RejectedVideoReached(YoutubeDLError):
2491 """ --max-downloads limit has been reached. """
2492 pass
2493
2494
bf5b9d85 2495class MaxDownloadsReached(YoutubeDLError):
59ae15a5
PH
2496 """ --max-downloads limit has been reached. """
2497 pass
d77c3dfd
FV
2498
2499
bf5b9d85 2500class UnavailableVideoError(YoutubeDLError):
59ae15a5 2501 """Unavailable Format exception.
d77c3dfd 2502
59ae15a5
PH
2503 This exception will be thrown when a video is requested
2504 in a format that is not available for that video.
2505 """
2506 pass
d77c3dfd
FV
2507
2508
bf5b9d85 2509class ContentTooShortError(YoutubeDLError):
59ae15a5 2510 """Content Too Short exception.
d77c3dfd 2511
59ae15a5
PH
2512 This exception may be raised by FileDownloader objects when a file they
2513 download is too small for what the server announced first, indicating
2514 the connection was probably interrupted.
2515 """
d77c3dfd 2516
59ae15a5 2517 def __init__(self, downloaded, expected):
bf5b9d85
PM
2518 super(ContentTooShortError, self).__init__(
2519 'Downloaded {0} bytes, expected {1} bytes'.format(downloaded, expected)
2520 )
2c7ed247 2521 # Both in bytes
59ae15a5
PH
2522 self.downloaded = downloaded
2523 self.expected = expected
d77c3dfd 2524
5f6a1245 2525
bf5b9d85 2526class XAttrMetadataError(YoutubeDLError):
efa97bdc
YCH
2527 def __init__(self, code=None, msg='Unknown error'):
2528 super(XAttrMetadataError, self).__init__(msg)
2529 self.code = code
bd264412 2530 self.msg = msg
efa97bdc
YCH
2531
2532 # Parsing code and msg
3089bc74 2533 if (self.code in (errno.ENOSPC, errno.EDQUOT)
a0566bbf 2534 or 'No space left' in self.msg or 'Disk quota exceeded' in self.msg):
efa97bdc
YCH
2535 self.reason = 'NO_SPACE'
2536 elif self.code == errno.E2BIG or 'Argument list too long' in self.msg:
2537 self.reason = 'VALUE_TOO_LONG'
2538 else:
2539 self.reason = 'NOT_SUPPORTED'
2540
2541
bf5b9d85 2542class XAttrUnavailableError(YoutubeDLError):
efa97bdc
YCH
2543 pass
2544
2545
c5a59d93 2546def _create_http_connection(ydl_handler, http_class, is_https, *args, **kwargs):
e5e78797
S
2547 # Working around python 2 bug (see http://bugs.python.org/issue17849) by limiting
2548 # expected HTTP responses to meet HTTP/1.0 or later (see also
067aa17e 2549 # https://github.com/ytdl-org/youtube-dl/issues/6727)
e5e78797 2550 if sys.version_info < (3, 0):
65220c3b
S
2551 kwargs['strict'] = True
2552 hc = http_class(*args, **compat_kwargs(kwargs))
be4a824d 2553 source_address = ydl_handler._params.get('source_address')
8959018a 2554
be4a824d 2555 if source_address is not None:
8959018a
AU
2556 # This is to workaround _create_connection() from socket where it will try all
2557 # address data from getaddrinfo() including IPv6. This filters the result from
2558 # getaddrinfo() based on the source_address value.
2559 # This is based on the cpython socket.create_connection() function.
2560 # https://github.com/python/cpython/blob/master/Lib/socket.py#L691
2561 def _create_connection(address, timeout=socket._GLOBAL_DEFAULT_TIMEOUT, source_address=None):
2562 host, port = address
2563 err = None
2564 addrs = socket.getaddrinfo(host, port, 0, socket.SOCK_STREAM)
9e21e6d9
S
2565 af = socket.AF_INET if '.' in source_address[0] else socket.AF_INET6
2566 ip_addrs = [addr for addr in addrs if addr[0] == af]
2567 if addrs and not ip_addrs:
2568 ip_version = 'v4' if af == socket.AF_INET else 'v6'
2569 raise socket.error(
2570 "No remote IP%s addresses available for connect, can't use '%s' as source address"
2571 % (ip_version, source_address[0]))
8959018a
AU
2572 for res in ip_addrs:
2573 af, socktype, proto, canonname, sa = res
2574 sock = None
2575 try:
2576 sock = socket.socket(af, socktype, proto)
2577 if timeout is not socket._GLOBAL_DEFAULT_TIMEOUT:
2578 sock.settimeout(timeout)
2579 sock.bind(source_address)
2580 sock.connect(sa)
2581 err = None # Explicitly break reference cycle
2582 return sock
2583 except socket.error as _:
2584 err = _
2585 if sock is not None:
2586 sock.close()
2587 if err is not None:
2588 raise err
2589 else:
9e21e6d9
S
2590 raise socket.error('getaddrinfo returns an empty list')
2591 if hasattr(hc, '_create_connection'):
2592 hc._create_connection = _create_connection
be4a824d
PH
2593 sa = (source_address, 0)
2594 if hasattr(hc, 'source_address'): # Python 2.7+
2595 hc.source_address = sa
2596 else: # Python 2.6
2597 def _hc_connect(self, *args, **kwargs):
9e21e6d9 2598 sock = _create_connection(
be4a824d
PH
2599 (self.host, self.port), self.timeout, sa)
2600 if is_https:
d7932313
PH
2601 self.sock = ssl.wrap_socket(
2602 sock, self.key_file, self.cert_file,
2603 ssl_version=ssl.PROTOCOL_TLSv1)
be4a824d
PH
2604 else:
2605 self.sock = sock
2606 hc.connect = functools.partial(_hc_connect, hc)
2607
2608 return hc
2609
2610
87f0e62d 2611def handle_youtubedl_headers(headers):
992fc9d6
YCH
2612 filtered_headers = headers
2613
2614 if 'Youtubedl-no-compression' in filtered_headers:
2615 filtered_headers = dict((k, v) for k, v in filtered_headers.items() if k.lower() != 'accept-encoding')
87f0e62d 2616 del filtered_headers['Youtubedl-no-compression']
87f0e62d 2617
992fc9d6 2618 return filtered_headers
87f0e62d
YCH
2619
2620
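A sketch of what handle_youtubedl_headers() does to a header dict (illustrative values, assuming the yt_dlp.utils import path): when the internal marker header is present, both it and any Accept-Encoding header are dropped.

    from yt_dlp.utils import handle_youtubedl_headers

    headers = {
        'User-Agent': 'example-agent/1.0',   # hypothetical UA string
        'Accept-Encoding': 'gzip, deflate',
        'Youtubedl-no-compression': 'True',
    }
    handle_youtubedl_headers(headers)
    # {'User-Agent': 'example-agent/1.0'}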
acebc9cd 2621class YoutubeDLHandler(compat_urllib_request.HTTPHandler):
59ae15a5
PH
2622 """Handler for HTTP requests and responses.
2623
2624 This class, when installed with an OpenerDirector, automatically adds
2625 the standard headers to every HTTP request and handles gzipped and
2626 deflated responses from web servers. If compression is to be avoided in
2627 a particular request, the original request in the program code only has
0424ec30 2628 to include the HTTP header "Youtubedl-no-compression", which will be
59ae15a5
PH
2629 removed before making the real request.
2630
2631 Part of this code was copied from:
2632
2633 http://techknack.net/python-urllib2-handlers/
2634
2635 Andrew Rowls, the author of that code, agreed to release it to the
2636 public domain.
2637 """
2638
be4a824d
PH
2639 def __init__(self, params, *args, **kwargs):
2640 compat_urllib_request.HTTPHandler.__init__(self, *args, **kwargs)
2641 self._params = params
2642
2643 def http_open(self, req):
71aff188
YCH
2644 conn_class = compat_http_client.HTTPConnection
2645
2646 socks_proxy = req.headers.get('Ytdl-socks-proxy')
2647 if socks_proxy:
2648 conn_class = make_socks_conn_class(conn_class, socks_proxy)
2649 del req.headers['Ytdl-socks-proxy']
2650
be4a824d 2651 return self.do_open(functools.partial(
71aff188 2652 _create_http_connection, self, conn_class, False),
be4a824d
PH
2653 req)
2654
59ae15a5
PH
2655 @staticmethod
2656 def deflate(data):
fc2119f2 2657 if not data:
2658 return data
59ae15a5
PH
2659 try:
2660 return zlib.decompress(data, -zlib.MAX_WBITS)
2661 except zlib.error:
2662 return zlib.decompress(data)
2663
acebc9cd 2664 def http_request(self, req):
51f267d9
S
2665 # According to RFC 3986, URLs can not contain non-ASCII characters, however this is not
2666 # always respected by websites, some tend to give out URLs with non percent-encoded
2667 # non-ASCII characters (see telemb.py, ard.py [#3412])
2668 # urllib chokes on URLs with non-ASCII characters (see http://bugs.python.org/issue3991)
2669 # To work around aforementioned issue we will replace request's original URL with
2670 # percent-encoded one
2671 # Since redirects are also affected (e.g. http://www.southpark.de/alle-episoden/s18e09)
2672 # the code of this workaround has been moved here from YoutubeDL.urlopen()
2673 url = req.get_full_url()
2674 url_escaped = escape_url(url)
2675
2676 # Substitute URL if any change after escaping
2677 if url != url_escaped:
15d260eb 2678 req = update_Request(req, url=url_escaped)
51f267d9 2679
33ac271b 2680 for h, v in std_headers.items():
3d5f7a39
JK
2681 # Capitalize is needed because of Python bug 2275: http://bugs.python.org/issue2275
2682 # The dict keys are capitalized because of this bug by urllib
2683 if h.capitalize() not in req.headers:
33ac271b 2684 req.add_header(h, v)
87f0e62d
YCH
2685
2686 req.headers = handle_youtubedl_headers(req.headers)
989b4b2b
PH
2687
2688 if sys.version_info < (2, 7) and '#' in req.get_full_url():
2689 # Python 2.6 is brain-dead when it comes to fragments
2690 req._Request__original = req._Request__original.partition('#')[0]
2691 req._Request__r_type = req._Request__r_type.partition('#')[0]
2692
59ae15a5
PH
2693 return req
2694
acebc9cd 2695 def http_response(self, req, resp):
59ae15a5
PH
2696 old_resp = resp
2697 # gzip
2698 if resp.headers.get('Content-encoding', '') == 'gzip':
aa3e9507
PH
2699 content = resp.read()
2700 gz = gzip.GzipFile(fileobj=io.BytesIO(content), mode='rb')
2701 try:
2702 uncompressed = io.BytesIO(gz.read())
2703 except IOError as original_ioerror:
2704 # There may be junk at the end of the file
2705 # See http://stackoverflow.com/q/4928560/35070 for details
2706 for i in range(1, 1024):
2707 try:
2708 gz = gzip.GzipFile(fileobj=io.BytesIO(content[:-i]), mode='rb')
2709 uncompressed = io.BytesIO(gz.read())
2710 except IOError:
2711 continue
2712 break
2713 else:
2714 raise original_ioerror
b407d853 2715 resp = compat_urllib_request.addinfourl(uncompressed, old_resp.headers, old_resp.url, old_resp.code)
59ae15a5 2716 resp.msg = old_resp.msg
c047270c 2717 del resp.headers['Content-encoding']
59ae15a5
PH
2718 # deflate
2719 if resp.headers.get('Content-encoding', '') == 'deflate':
2720 gz = io.BytesIO(self.deflate(resp.read()))
b407d853 2721 resp = compat_urllib_request.addinfourl(gz, old_resp.headers, old_resp.url, old_resp.code)
59ae15a5 2722 resp.msg = old_resp.msg
c047270c 2723 del resp.headers['Content-encoding']
ad729172 2724 # Percent-encode redirect URL of Location HTTP header to satisfy RFC 3986 (see
067aa17e 2725 # https://github.com/ytdl-org/youtube-dl/issues/6457).
5a4d9ddb
S
2726 if 300 <= resp.code < 400:
2727 location = resp.headers.get('Location')
2728 if location:
2729 # As of RFC 2616 default charset is iso-8859-1 that is respected by python 3
2730 if sys.version_info >= (3, 0):
2731 location = location.encode('iso-8859-1').decode('utf-8')
0ea59007
YCH
2732 else:
2733 location = location.decode('utf-8')
5a4d9ddb
S
2734 location_escaped = escape_url(location)
2735 if location != location_escaped:
2736 del resp.headers['Location']
9a4aec8b
YCH
2737 if sys.version_info < (3, 0):
2738 location_escaped = location_escaped.encode('utf-8')
5a4d9ddb 2739 resp.headers['Location'] = location_escaped
59ae15a5 2740 return resp
0f8d03f8 2741
acebc9cd
PH
2742 https_request = http_request
2743 https_response = http_response
bf50b038 2744
5de90176 2745
71aff188
YCH
2746def make_socks_conn_class(base_class, socks_proxy):
2747 assert issubclass(base_class, (
2748 compat_http_client.HTTPConnection, compat_http_client.HTTPSConnection))
2749
2750 url_components = compat_urlparse.urlparse(socks_proxy)
2751 if url_components.scheme.lower() == 'socks5':
2752 socks_type = ProxyType.SOCKS5
2753 elif url_components.scheme.lower() in ('socks', 'socks4'):
2754 socks_type = ProxyType.SOCKS4
51fb4995
YCH
2755 elif url_components.scheme.lower() == 'socks4a':
2756 socks_type = ProxyType.SOCKS4A
71aff188 2757
cdd94c2e
YCH
2758 def unquote_if_non_empty(s):
2759 if not s:
2760 return s
2761 return compat_urllib_parse_unquote_plus(s)
2762
71aff188
YCH
2763 proxy_args = (
2764 socks_type,
2765 url_components.hostname, url_components.port or 1080,
2766 True, # Remote DNS
cdd94c2e
YCH
2767 unquote_if_non_empty(url_components.username),
2768 unquote_if_non_empty(url_components.password),
71aff188
YCH
2769 )
2770
2771 class SocksConnection(base_class):
2772 def connect(self):
2773 self.sock = sockssocket()
2774 self.sock.setproxy(*proxy_args)
2775 if type(self.timeout) in (int, float):
2776 self.sock.settimeout(self.timeout)
2777 self.sock.connect((self.host, self.port))
2778
2779 if isinstance(self, compat_http_client.HTTPSConnection):
2780 if hasattr(self, '_context'): # Python > 2.6
2781 self.sock = self._context.wrap_socket(
2782 self.sock, server_hostname=self.host)
2783 else:
2784 self.sock = ssl.wrap_socket(self.sock)
2785
2786 return SocksConnection
2787
2788
be4a824d
PH
2789class YoutubeDLHTTPSHandler(compat_urllib_request.HTTPSHandler):
2790 def __init__(self, params, https_conn_class=None, *args, **kwargs):
2791 compat_urllib_request.HTTPSHandler.__init__(self, *args, **kwargs)
2792 self._https_conn_class = https_conn_class or compat_http_client.HTTPSConnection
2793 self._params = params
2794
2795 def https_open(self, req):
4f264c02 2796 kwargs = {}
71aff188
YCH
2797 conn_class = self._https_conn_class
2798
4f264c02
JMF
2799 if hasattr(self, '_context'): # python > 2.6
2800 kwargs['context'] = self._context
2801 if hasattr(self, '_check_hostname'): # python 3.x
2802 kwargs['check_hostname'] = self._check_hostname
71aff188
YCH
2803
2804 socks_proxy = req.headers.get('Ytdl-socks-proxy')
2805 if socks_proxy:
2806 conn_class = make_socks_conn_class(conn_class, socks_proxy)
2807 del req.headers['Ytdl-socks-proxy']
2808
be4a824d 2809 return self.do_open(functools.partial(
71aff188 2810 _create_http_connection, self, conn_class, True),
4f264c02 2811 req, **kwargs)
be4a824d
PH
2812
2813
1bab3437 2814class YoutubeDLCookieJar(compat_cookiejar.MozillaCookieJar):
f1a8511f
S
2815 """
2816 See [1] for cookie file format.
2817
2818 1. https://curl.haxx.se/docs/http-cookies.html
2819 """
e7e62441 2820 _HTTPONLY_PREFIX = '#HttpOnly_'
c380cc28
S
2821 _ENTRY_LEN = 7
2822 _HEADER = '''# Netscape HTTP Cookie File
7a5c1cfe 2823# This file is generated by yt-dlp. Do not edit.
c380cc28
S
2824
2825'''
2826 _CookieFileEntry = collections.namedtuple(
2827 'CookieFileEntry',
2828 ('domain_name', 'include_subdomains', 'path', 'https_only', 'expires_at', 'name', 'value'))
e7e62441 2829
1bab3437 2830 def save(self, filename=None, ignore_discard=False, ignore_expires=False):
c380cc28
S
2831 """
2832 Save cookies to a file.
2833
2834 Most of the code is taken from CPython 3.8 and slightly adapted
2835 to support cookie files with UTF-8 in both python 2 and 3.
2836 """
2837 if filename is None:
2838 if self.filename is not None:
2839 filename = self.filename
2840 else:
2841 raise ValueError(compat_cookiejar.MISSING_FILENAME_TEXT)
2842
1bab3437
S
2843 # Store session cookies with `expires` set to 0 instead of an empty
2844 # string
2845 for cookie in self:
2846 if cookie.expires is None:
2847 cookie.expires = 0
c380cc28
S
2848
2849 with io.open(filename, 'w', encoding='utf-8') as f:
2850 f.write(self._HEADER)
2851 now = time.time()
2852 for cookie in self:
2853 if not ignore_discard and cookie.discard:
2854 continue
2855 if not ignore_expires and cookie.is_expired(now):
2856 continue
2857 if cookie.secure:
2858 secure = 'TRUE'
2859 else:
2860 secure = 'FALSE'
2861 if cookie.domain.startswith('.'):
2862 initial_dot = 'TRUE'
2863 else:
2864 initial_dot = 'FALSE'
2865 if cookie.expires is not None:
2866 expires = compat_str(cookie.expires)
2867 else:
2868 expires = ''
2869 if cookie.value is None:
2870 # cookies.txt regards 'Set-Cookie: foo' as a cookie
2871 # with no name, whereas http.cookiejar regards it as a
2872 # cookie with no value.
2873 name = ''
2874 value = cookie.name
2875 else:
2876 name = cookie.name
2877 value = cookie.value
2878 f.write(
2879 '\t'.join([cookie.domain, initial_dot, cookie.path,
2880 secure, expires, name, value]) + '\n')
1bab3437
S
2881
2882 def load(self, filename=None, ignore_discard=False, ignore_expires=False):
e7e62441 2883 """Load cookies from a file."""
2884 if filename is None:
2885 if self.filename is not None:
2886 filename = self.filename
2887 else:
2888 raise ValueError(compat_cookiejar.MISSING_FILENAME_TEXT)
2889
c380cc28
S
2890 def prepare_line(line):
2891 if line.startswith(self._HTTPONLY_PREFIX):
2892 line = line[len(self._HTTPONLY_PREFIX):]
2893 # comments and empty lines are fine
2894 if line.startswith('#') or not line.strip():
2895 return line
2896 cookie_list = line.split('\t')
2897 if len(cookie_list) != self._ENTRY_LEN:
2898 raise compat_cookiejar.LoadError('invalid length %d' % len(cookie_list))
2899 cookie = self._CookieFileEntry(*cookie_list)
2900 if cookie.expires_at and not cookie.expires_at.isdigit():
2901 raise compat_cookiejar.LoadError('invalid expires at %s' % cookie.expires_at)
2902 return line
2903
e7e62441 2904 cf = io.StringIO()
c380cc28 2905 with io.open(filename, encoding='utf-8') as f:
e7e62441 2906 for line in f:
c380cc28
S
2907 try:
2908 cf.write(prepare_line(line))
2909 except compat_cookiejar.LoadError as e:
2910 write_string(
2911 'WARNING: skipping cookie file entry due to %s: %r\n'
2912 % (e, line), sys.stderr)
2913 continue
e7e62441 2914 cf.seek(0)
2915 self._really_load(cf, filename, ignore_discard, ignore_expires)
1bab3437
S
2916 # Session cookies are denoted by either `expires` field set to
2917 # an empty string or 0. MozillaCookieJar only recognizes the former
2918 # (see [1]). So we need to force the latter to be recognized as session
2919 # cookies on our own.
2920 # Session cookies may be important for cookies-based authentication,
2921 # e.g. usually, when user does not check 'Remember me' check box while
2922 # logging in on a site, some important cookies are stored as session
2923 # cookies so that not recognizing them will result in failed login.
2924 # 1. https://bugs.python.org/issue17164
2925 for cookie in self:
2926 # Treat `expires=0` cookies as session cookies
2927 if cookie.expires == 0:
2928 cookie.expires = None
2929 cookie.discard = True
2930
2931
a6420bf5
S
2932class YoutubeDLCookieProcessor(compat_urllib_request.HTTPCookieProcessor):
2933 def __init__(self, cookiejar=None):
2934 compat_urllib_request.HTTPCookieProcessor.__init__(self, cookiejar)
2935
2936 def http_response(self, request, response):
2937 # Python 2 will choke on the next HTTP request if there are non-ASCII
2938 # characters in the Set-Cookie HTTP header of the last response (see
067aa17e 2939 # https://github.com/ytdl-org/youtube-dl/issues/6769).
a6420bf5
S
2940 # In order to at least prevent crashing we will percent encode Set-Cookie
2941 # header before HTTPCookieProcessor starts processing it.
e28034c5
S
2942 # if sys.version_info < (3, 0) and response.headers:
2943 # for set_cookie_header in ('Set-Cookie', 'Set-Cookie2'):
2944 # set_cookie = response.headers.get(set_cookie_header)
2945 # if set_cookie:
2946 # set_cookie_escaped = compat_urllib_parse.quote(set_cookie, b"%/;:@&=+$,!~*'()?#[] ")
2947 # if set_cookie != set_cookie_escaped:
2948 # del response.headers[set_cookie_header]
2949 # response.headers[set_cookie_header] = set_cookie_escaped
a6420bf5
S
2950 return compat_urllib_request.HTTPCookieProcessor.http_response(self, request, response)
2951
f5fa042c 2952 https_request = compat_urllib_request.HTTPCookieProcessor.http_request
a6420bf5
S
2953 https_response = http_response
2954
2955
fca6dba8 2956class YoutubeDLRedirectHandler(compat_urllib_request.HTTPRedirectHandler):
201c1459 2957 """YoutubeDL redirect handler
2958
2959 The code is based on HTTPRedirectHandler implementation from CPython [1].
2960
2961 This redirect handler solves two issues:
2962 - ensures redirect URL is always unicode under python 2
2963 - introduces support for experimental HTTP response status code
2964 308 Permanent Redirect [2] used by some sites [3]
2965
2966 1. https://github.com/python/cpython/blob/master/Lib/urllib/request.py
2967 2. https://developer.mozilla.org/en-US/docs/Web/HTTP/Status/308
2968 3. https://github.com/ytdl-org/youtube-dl/issues/28768
2969 """
2970
2971 http_error_301 = http_error_303 = http_error_307 = http_error_308 = compat_urllib_request.HTTPRedirectHandler.http_error_302
2972
2973 def redirect_request(self, req, fp, code, msg, headers, newurl):
2974 """Return a Request or None in response to a redirect.
2975
2976 This is called by the http_error_30x methods when a
2977 redirection response is received. If a redirection should
2978 take place, return a new Request to allow http_error_30x to
2979 perform the redirect. Otherwise, raise HTTPError if no-one
2980 else should try to handle this url. Return None if you can't
2981 but another Handler might.
2982 """
2983 m = req.get_method()
2984 if (not (code in (301, 302, 303, 307, 308) and m in ("GET", "HEAD")
2985 or code in (301, 302, 303) and m == "POST")):
2986 raise compat_HTTPError(req.full_url, code, msg, headers, fp)
2987 # Strictly (according to RFC 2616), 301 or 302 in response to
2988 # a POST MUST NOT cause a redirection without confirmation
2989 # from the user (of urllib.request, in this case). In practice,
2990 # essentially all clients do redirect in this case, so we do
2991 # the same.
2992
2993 # On python 2 urlh.geturl() may sometimes return redirect URL
2994 # as byte string instead of unicode. This workaround allows
2995 # to force it always return unicode.
2996 if sys.version_info[0] < 3:
2997 newurl = compat_str(newurl)
2998
2999 # Be conciliant with URIs containing a space. This is mainly
3000 # redundant with the more complete encoding done in http_error_302(),
3001 # but it is kept for compatibility with other callers.
3002 newurl = newurl.replace(' ', '%20')
3003
3004 CONTENT_HEADERS = ("content-length", "content-type")
3005 # NB: don't use dict comprehension for python 2.6 compatibility
3006 newheaders = dict((k, v) for k, v in req.headers.items()
3007 if k.lower() not in CONTENT_HEADERS)
3008 return compat_urllib_request.Request(
3009 newurl, headers=newheaders, origin_req_host=req.origin_req_host,
3010 unverifiable=True)
fca6dba8
S
3011
3012
46f59e89
S
3013def extract_timezone(date_str):
3014 m = re.search(
3015 r'^.{8,}?(?P<tz>Z$| ?(?P<sign>\+|-)(?P<hours>[0-9]{2}):?(?P<minutes>[0-9]{2})$)',
3016 date_str)
3017 if not m:
3018 timezone = datetime.timedelta()
3019 else:
3020 date_str = date_str[:-len(m.group('tz'))]
3021 if not m.group('sign'):
3022 timezone = datetime.timedelta()
3023 else:
3024 sign = 1 if m.group('sign') == '+' else -1
3025 timezone = datetime.timedelta(
3026 hours=sign * int(m.group('hours')),
3027 minutes=sign * int(m.group('minutes')))
3028 return timezone, date_str
3029
3030
08b38d54 3031def parse_iso8601(date_str, delimiter='T', timezone=None):
912b38b4
PH
3032 """ Return a UNIX timestamp from the given date """
3033
3034 if date_str is None:
3035 return None
3036
52c3a6e4
S
3037 date_str = re.sub(r'\.[0-9]+', '', date_str)
3038
08b38d54 3039 if timezone is None:
46f59e89
S
3040 timezone, date_str = extract_timezone(date_str)
3041
52c3a6e4
S
3042 try:
3043 date_format = '%Y-%m-%d{0}%H:%M:%S'.format(delimiter)
3044 dt = datetime.datetime.strptime(date_str, date_format) - timezone
3045 return calendar.timegm(dt.timetuple())
3046 except ValueError:
3047 pass
912b38b4
PH
3048
3049
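A minimal sketch of parse_iso8601() on epoch-anchored inputs, chosen so the expected POSIX timestamps are easy to verify (assumes the yt_dlp.utils import path):

    from yt_dlp.utils import parse_iso8601

    parse_iso8601('1970-01-01T00:00:00Z')        # 0
    parse_iso8601('1970-01-02T00:00:00+00:00')   # 86400
    parse_iso8601('1970-01-01T05:30:00+05:30')   # 0 (the offset is subtracted)
    parse_iso8601(None)                          # None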
46f59e89
S
3050def date_formats(day_first=True):
3051 return DATE_FORMATS_DAY_FIRST if day_first else DATE_FORMATS_MONTH_FIRST
3052
3053
42bdd9d0 3054def unified_strdate(date_str, day_first=True):
bf50b038 3055 """Return a string with the date in the format YYYYMMDD"""
64e7ad60
PH
3056
3057 if date_str is None:
3058 return None
bf50b038 3059 upload_date = None
5f6a1245 3060 # Replace commas
026fcc04 3061 date_str = date_str.replace(',', ' ')
42bdd9d0 3062 # Remove AM/PM + timezone
9bb8e0a3 3063 date_str = re.sub(r'(?i)\s*(?:AM|PM)(?:\s+[A-Z]+)?', '', date_str)
46f59e89 3064 _, date_str = extract_timezone(date_str)
42bdd9d0 3065
46f59e89 3066 for expression in date_formats(day_first):
bf50b038
JMF
3067 try:
3068 upload_date = datetime.datetime.strptime(date_str, expression).strftime('%Y%m%d')
5de90176 3069 except ValueError:
bf50b038 3070 pass
42393ce2
PH
3071 if upload_date is None:
3072 timetuple = email.utils.parsedate_tz(date_str)
3073 if timetuple:
c6b9cf05
S
3074 try:
3075 upload_date = datetime.datetime(*timetuple[:6]).strftime('%Y%m%d')
3076 except ValueError:
3077 pass
6a750402
JMF
3078 if upload_date is not None:
3079 return compat_str(upload_date)
bf50b038 3080
5f6a1245 3081
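Illustrative calls to unified_strdate(); the exact accepted inputs depend on the DATE_FORMATS tables defined earlier in this module, so treat these values as assumptions:

    from yt_dlp.utils import unified_strdate

    unified_strdate('2018-12-21')   # '20181221'
    unified_strdate('21.12.2018')   # '20181221' (day-first by default)
    unified_strdate('not a date')   # None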
46f59e89
S
3082def unified_timestamp(date_str, day_first=True):
3083 if date_str is None:
3084 return None
3085
2ae2ffda 3086 date_str = re.sub(r'[,|]', '', date_str)
46f59e89 3087
7dc2a74e 3088 pm_delta = 12 if re.search(r'(?i)PM', date_str) else 0
46f59e89
S
3089 timezone, date_str = extract_timezone(date_str)
3090
3091 # Remove AM/PM + timezone
3092 date_str = re.sub(r'(?i)\s*(?:AM|PM)(?:\s+[A-Z]+)?', '', date_str)
3093
deef3195
S
3094 # Remove unrecognized timezones from ISO 8601 alike timestamps
3095 m = re.search(r'\d{1,2}:\d{1,2}(?:\.\d+)?(?P<tz>\s*[A-Z]+)$', date_str)
3096 if m:
3097 date_str = date_str[:-len(m.group('tz'))]
3098
f226880c
PH
3099 # Python only supports microseconds, so remove nanoseconds
3100 m = re.search(r'^([0-9]{4,}-[0-9]{1,2}-[0-9]{1,2}T[0-9]{1,2}:[0-9]{1,2}:[0-9]{1,2}\.[0-9]{6})[0-9]+$', date_str)
3101 if m:
3102 date_str = m.group(1)
3103
46f59e89
S
3104 for expression in date_formats(day_first):
3105 try:
7dc2a74e 3106 dt = datetime.datetime.strptime(date_str, expression) - timezone + datetime.timedelta(hours=pm_delta)
46f59e89
S
3107 return calendar.timegm(dt.timetuple())
3108 except ValueError:
3109 pass
3110 timetuple = email.utils.parsedate_tz(date_str)
3111 if timetuple:
7dc2a74e 3112 return calendar.timegm(timetuple) + pm_delta * 3600
46f59e89
S
3113
3114
28e614de 3115def determine_ext(url, default_ext='unknown_video'):
85750f89 3116 if url is None or '.' not in url:
f4776371 3117 return default_ext
9cb9a5df 3118 guess = url.partition('?')[0].rpartition('.')[2]
73e79f2a
PH
3119 if re.match(r'^[A-Za-z0-9]+$', guess):
3120 return guess
a7aaa398
S
3121 # Try to extract ext from URLs like http://example.com/foo/bar.mp4/?download
3122 elif guess.rstrip('/') in KNOWN_EXTENSIONS:
9cb9a5df 3123 return guess.rstrip('/')
73e79f2a 3124 else:
cbdbb766 3125 return default_ext
73e79f2a 3126
5f6a1245 3127
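A small sketch of determine_ext() behaviour with made-up URLs (KNOWN_EXTENSIONS is defined elsewhere in this module):

    from yt_dlp.utils import determine_ext

    determine_ext('https://example.com/clip.mp4?download=1')  # 'mp4'
    determine_ext('https://example.com/watch')                # 'unknown_video'
    determine_ext(None, default_ext='mp4')                    # 'mp4'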
824fa511
S
3128def subtitles_filename(filename, sub_lang, sub_format, expected_real_ext=None):
3129 return replace_extension(filename, sub_lang + '.' + sub_format, expected_real_ext)
d4051a8e 3130
5f6a1245 3131
9e62f283 3132def datetime_from_str(date_str, precision='auto', format='%Y%m%d'):
37254abc
JMF
3133 """
3134 Return a datetime object from a string in the format YYYYMMDD or
9e62f283 3135 (now|today|date)[+-][0-9](microsecond|second|minute|hour|day|week|month|year)(s)?
3136
3137 format: string date format used to return datetime object from
3138 precision: round the time portion of a datetime object.
3139 auto|microsecond|second|minute|hour|day.
3140 auto: round to the unit provided in date_str (if applicable).
3141 """
3142 auto_precision = False
3143 if precision == 'auto':
3144 auto_precision = True
3145 precision = 'microsecond'
3146 today = datetime_round(datetime.datetime.now(), precision)
f8795e10 3147 if date_str in ('now', 'today'):
37254abc 3148 return today
f8795e10
PH
3149 if date_str == 'yesterday':
3150 return today - datetime.timedelta(days=1)
9e62f283 3151 match = re.match(
3152 r'(?P<start>.+)(?P<sign>[+-])(?P<time>\d+)(?P<unit>microsecond|second|minute|hour|day|week|month|year)(s)?',
3153 date_str)
37254abc 3154 if match is not None:
9e62f283 3155 start_time = datetime_from_str(match.group('start'), precision, format)
3156 time = int(match.group('time')) * (-1 if match.group('sign') == '-' else 1)
37254abc 3157 unit = match.group('unit')
9e62f283 3158 if unit == 'month' or unit == 'year':
3159 new_date = datetime_add_months(start_time, time * 12 if unit == 'year' else time)
37254abc 3160 unit = 'day'
9e62f283 3161 else:
3162 if unit == 'week':
3163 unit = 'day'
3164 time *= 7
3165 delta = datetime.timedelta(**{unit + 's': time})
3166 new_date = start_time + delta
3167 if auto_precision:
3168 return datetime_round(new_date, unit)
3169 return new_date
3170
3171 return datetime_round(datetime.datetime.strptime(date_str, format), precision)
3172
3173
3174def date_from_str(date_str, format='%Y%m%d'):
3175 """
3176 Return a datetime object from a string in the format YYYYMMDD or
3177 (now|today|date)[+-][0-9](microsecond|second|minute|hour|day|week|month|year)(s)?
3178
3179 format: string date format used to return datetime object from
3180 """
3181 return datetime_from_str(date_str, precision='microsecond', format=format).date()
3182
3183
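The relative-date syntax accepted by date_from_str()/datetime_from_str(), shown with a few hypothetical expressions (assumes the yt_dlp.utils import path):

    from yt_dlp.utils import date_from_str

    date_from_str('20210315')      # datetime.date(2021, 3, 15)
    date_from_str('today-1week')   # the date seven days before today
    date_from_str('now+1month')    # same day next month (clamped to month length)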
3184def datetime_add_months(dt, months):
3185 """Increment/Decrement a datetime object by months."""
3186 month = dt.month + months - 1
3187 year = dt.year + month // 12
3188 month = month % 12 + 1
3189 day = min(dt.day, calendar.monthrange(year, month)[1])
3190 return dt.replace(year, month, day)
3191
3192
3193def datetime_round(dt, precision='day'):
3194 """
3195 Round a datetime object's time to a specific precision
3196 """
3197 if precision == 'microsecond':
3198 return dt
3199
3200 unit_seconds = {
3201 'day': 86400,
3202 'hour': 3600,
3203 'minute': 60,
3204 'second': 1,
3205 }
3206 roundto = lambda x, n: ((x + n / 2) // n) * n
3207 timestamp = calendar.timegm(dt.timetuple())
3208 return datetime.datetime.utcfromtimestamp(roundto(timestamp, unit_seconds[precision]))
5f6a1245
JW
3209
3210
e63fc1be 3211def hyphenate_date(date_str):
3212 """
3213 Convert a date in 'YYYYMMDD' format to 'YYYY-MM-DD' format"""
3214 match = re.match(r'^(\d\d\d\d)(\d\d)(\d\d)$', date_str)
3215 if match is not None:
3216 return '-'.join(match.groups())
3217 else:
3218 return date_str
3219
5f6a1245 3220
bd558525
JMF
3221class DateRange(object):
3222 """Represents a time interval between two dates"""
5f6a1245 3223
bd558525
JMF
3224 def __init__(self, start=None, end=None):
3225 """start and end must be strings in the format accepted by date"""
3226 if start is not None:
3227 self.start = date_from_str(start)
3228 else:
3229 self.start = datetime.datetime.min.date()
3230 if end is not None:
3231 self.end = date_from_str(end)
3232 else:
3233 self.end = datetime.datetime.max.date()
37254abc 3234 if self.start > self.end:
bd558525 3235 raise ValueError('Date range: "%s" , the start date must be before the end date' % self)
5f6a1245 3236
bd558525
JMF
3237 @classmethod
3238 def day(cls, day):
3239 """Returns a range that only contains the given day"""
5f6a1245
JW
3240 return cls(day, day)
3241
bd558525
JMF
3242 def __contains__(self, date):
3243 """Check if the date is in the range"""
37254abc
JMF
3244 if not isinstance(date, datetime.date):
3245 date = date_from_str(date)
3246 return self.start <= date <= self.end
5f6a1245 3247
bd558525 3248 def __str__(self):
5f6a1245 3249 return '%s - %s' % (self.start.isoformat(), self.end.isoformat())
c496ca96
PH
3250
3251
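A brief DateRange sketch with arbitrary dates:

    from yt_dlp.utils import DateRange

    january = DateRange('20210101', '20210131')
    '20210115' in january       # True
    '20210201' in january       # False
    DateRange.day('20210704')   # a range containing only that day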
3252def platform_name():
3253 """ Returns the platform name as a compat_str """
3254 res = platform.platform()
3255 if isinstance(res, bytes):
3256 res = res.decode(preferredencoding())
3257
3258 assert isinstance(res, compat_str)
3259 return res
c257baff
PH
3260
3261
b58ddb32
PH
3262def _windows_write_string(s, out):
3263 """ Returns True if the string was written using special methods,
3264 False if it has yet to be written out."""
3265 # Adapted from http://stackoverflow.com/a/3259271/35070
3266
3267 import ctypes
3268 import ctypes.wintypes
3269
3270 WIN_OUTPUT_IDS = {
3271 1: -11,
3272 2: -12,
3273 }
3274
a383a98a
PH
3275 try:
3276 fileno = out.fileno()
3277 except AttributeError:
3278 # If the output stream doesn't have a fileno, it's virtual
3279 return False
aa42e873
PH
3280 except io.UnsupportedOperation:
3281 # Some strange Windows pseudo files?
3282 return False
b58ddb32
PH
3283 if fileno not in WIN_OUTPUT_IDS:
3284 return False
3285
d7cd9a9e 3286 GetStdHandle = compat_ctypes_WINFUNCTYPE(
b58ddb32 3287 ctypes.wintypes.HANDLE, ctypes.wintypes.DWORD)(
d7cd9a9e 3288 ('GetStdHandle', ctypes.windll.kernel32))
b58ddb32
PH
3289 h = GetStdHandle(WIN_OUTPUT_IDS[fileno])
3290
d7cd9a9e 3291 WriteConsoleW = compat_ctypes_WINFUNCTYPE(
b58ddb32
PH
3292 ctypes.wintypes.BOOL, ctypes.wintypes.HANDLE, ctypes.wintypes.LPWSTR,
3293 ctypes.wintypes.DWORD, ctypes.POINTER(ctypes.wintypes.DWORD),
d7cd9a9e 3294 ctypes.wintypes.LPVOID)(('WriteConsoleW', ctypes.windll.kernel32))
b58ddb32
PH
3295 written = ctypes.wintypes.DWORD(0)
3296
d7cd9a9e 3297 GetFileType = compat_ctypes_WINFUNCTYPE(ctypes.wintypes.DWORD, ctypes.wintypes.DWORD)(('GetFileType', ctypes.windll.kernel32))
b58ddb32
PH
3298 FILE_TYPE_CHAR = 0x0002
3299 FILE_TYPE_REMOTE = 0x8000
d7cd9a9e 3300 GetConsoleMode = compat_ctypes_WINFUNCTYPE(
b58ddb32
PH
3301 ctypes.wintypes.BOOL, ctypes.wintypes.HANDLE,
3302 ctypes.POINTER(ctypes.wintypes.DWORD))(
d7cd9a9e 3303 ('GetConsoleMode', ctypes.windll.kernel32))
b58ddb32
PH
3304 INVALID_HANDLE_VALUE = ctypes.wintypes.DWORD(-1).value
3305
3306 def not_a_console(handle):
3307 if handle == INVALID_HANDLE_VALUE or handle is None:
3308 return True
3089bc74
S
3309 return ((GetFileType(handle) & ~FILE_TYPE_REMOTE) != FILE_TYPE_CHAR
3310 or GetConsoleMode(handle, ctypes.byref(ctypes.wintypes.DWORD())) == 0)
b58ddb32
PH
3311
3312 if not_a_console(h):
3313 return False
3314
d1b9c912
PH
3315 def next_nonbmp_pos(s):
3316 try:
3317 return next(i for i, c in enumerate(s) if ord(c) > 0xffff)
3318 except StopIteration:
3319 return len(s)
3320
3321 while s:
3322 count = min(next_nonbmp_pos(s), 1024)
3323
b58ddb32 3324 ret = WriteConsoleW(
d1b9c912 3325 h, s, count if count else 2, ctypes.byref(written), None)
b58ddb32
PH
3326 if ret == 0:
3327 raise OSError('Failed to write string')
d1b9c912
PH
3328 if not count: # We just wrote a non-BMP character
3329 assert written.value == 2
3330 s = s[1:]
3331 else:
3332 assert written.value > 0
3333 s = s[written.value:]
b58ddb32
PH
3334 return True
3335
3336
734f90bb 3337def write_string(s, out=None, encoding=None):
7459e3a2
PH
3338 if out is None:
3339 out = sys.stderr
8bf48f23 3340 assert type(s) == compat_str
7459e3a2 3341
b58ddb32
PH
3342 if sys.platform == 'win32' and encoding is None and hasattr(out, 'fileno'):
3343 if _windows_write_string(s, out):
3344 return
3345
3089bc74
S
3346 if ('b' in getattr(out, 'mode', '')
3347 or sys.version_info[0] < 3): # Python 2 lies about mode of sys.stderr
104aa738
PH
3348 byt = s.encode(encoding or preferredencoding(), 'ignore')
3349 out.write(byt)
3350 elif hasattr(out, 'buffer'):
3351 enc = encoding or getattr(out, 'encoding', None) or preferredencoding()
3352 byt = s.encode(enc, 'ignore')
3353 out.buffer.write(byt)
3354 else:
8bf48f23 3355 out.write(s)
7459e3a2
PH
3356 out.flush()
3357
3358
48ea9cea
PH
3359def bytes_to_intlist(bs):
3360 if not bs:
3361 return []
3362 if isinstance(bs[0], int): # Python 3
3363 return list(bs)
3364 else:
3365 return [ord(c) for c in bs]
3366
c257baff 3367
cba892fa 3368def intlist_to_bytes(xs):
3369 if not xs:
3370 return b''
edaa23f8 3371 return compat_struct_pack('%dB' % len(xs), *xs)
c38b1e77
PH
3372
3373
c1c9a79c
PH
3374# Cross-platform file locking
3375if sys.platform == 'win32':
3376 import ctypes.wintypes
3377 import msvcrt
3378
3379 class OVERLAPPED(ctypes.Structure):
3380 _fields_ = [
3381 ('Internal', ctypes.wintypes.LPVOID),
3382 ('InternalHigh', ctypes.wintypes.LPVOID),
3383 ('Offset', ctypes.wintypes.DWORD),
3384 ('OffsetHigh', ctypes.wintypes.DWORD),
3385 ('hEvent', ctypes.wintypes.HANDLE),
3386 ]
3387
3388 kernel32 = ctypes.windll.kernel32
3389 LockFileEx = kernel32.LockFileEx
3390 LockFileEx.argtypes = [
3391 ctypes.wintypes.HANDLE, # hFile
3392 ctypes.wintypes.DWORD, # dwFlags
3393 ctypes.wintypes.DWORD, # dwReserved
3394 ctypes.wintypes.DWORD, # nNumberOfBytesToLockLow
3395 ctypes.wintypes.DWORD, # nNumberOfBytesToLockHigh
3396 ctypes.POINTER(OVERLAPPED) # Overlapped
3397 ]
3398 LockFileEx.restype = ctypes.wintypes.BOOL
3399 UnlockFileEx = kernel32.UnlockFileEx
3400 UnlockFileEx.argtypes = [
3401 ctypes.wintypes.HANDLE, # hFile
3402 ctypes.wintypes.DWORD, # dwReserved
3403 ctypes.wintypes.DWORD, # nNumberOfBytesToLockLow
3404 ctypes.wintypes.DWORD, # nNumberOfBytesToLockHigh
3405 ctypes.POINTER(OVERLAPPED) # Overlapped
3406 ]
3407 UnlockFileEx.restype = ctypes.wintypes.BOOL
3408 whole_low = 0xffffffff
3409 whole_high = 0x7fffffff
3410
3411 def _lock_file(f, exclusive):
3412 overlapped = OVERLAPPED()
3413 overlapped.Offset = 0
3414 overlapped.OffsetHigh = 0
3415 overlapped.hEvent = 0
3416 f._lock_file_overlapped_p = ctypes.pointer(overlapped)
3417 handle = msvcrt.get_osfhandle(f.fileno())
3418 if not LockFileEx(handle, 0x2 if exclusive else 0x0, 0,
3419 whole_low, whole_high, f._lock_file_overlapped_p):
3420 raise OSError('Locking file failed: %r' % ctypes.FormatError())
3421
3422 def _unlock_file(f):
3423 assert f._lock_file_overlapped_p
3424 handle = msvcrt.get_osfhandle(f.fileno())
3425 if not UnlockFileEx(handle, 0,
3426 whole_low, whole_high, f._lock_file_overlapped_p):
3427 raise OSError('Unlocking file failed: %r' % ctypes.FormatError())
3428
3429else:
399a76e6
YCH
3430 # Some platforms, such as Jython, are missing fcntl
3431 try:
3432 import fcntl
c1c9a79c 3433
399a76e6
YCH
3434 def _lock_file(f, exclusive):
3435 fcntl.flock(f, fcntl.LOCK_EX if exclusive else fcntl.LOCK_SH)
c1c9a79c 3436
399a76e6
YCH
3437 def _unlock_file(f):
3438 fcntl.flock(f, fcntl.LOCK_UN)
3439 except ImportError:
3440 UNSUPPORTED_MSG = 'file locking is not supported on this platform'
3441
3442 def _lock_file(f, exclusive):
3443 raise IOError(UNSUPPORTED_MSG)
3444
3445 def _unlock_file(f):
3446 raise IOError(UNSUPPORTED_MSG)
c1c9a79c
PH
3447
3448
3449class locked_file(object):
3450 def __init__(self, filename, mode, encoding=None):
3451 assert mode in ['r', 'a', 'w']
3452 self.f = io.open(filename, mode, encoding=encoding)
3453 self.mode = mode
3454
3455 def __enter__(self):
3456 exclusive = self.mode != 'r'
3457 try:
3458 _lock_file(self.f, exclusive)
3459 except IOError:
3460 self.f.close()
3461 raise
3462 return self
3463
3464 def __exit__(self, etype, value, traceback):
3465 try:
3466 _unlock_file(self.f)
3467 finally:
3468 self.f.close()
3469
3470 def __iter__(self):
3471 return iter(self.f)
3472
3473 def write(self, *args):
3474 return self.f.write(*args)
3475
3476 def read(self, *args):
3477 return self.f.read(*args)
4eb7f1d1
JMF
3478
3479
4644ac55
S
3480def get_filesystem_encoding():
3481 encoding = sys.getfilesystemencoding()
3482 return encoding if encoding is not None else 'utf-8'
3483
3484
4eb7f1d1 3485def shell_quote(args):
a6a173c2 3486 quoted_args = []
4644ac55 3487 encoding = get_filesystem_encoding()
a6a173c2
JMF
3488 for a in args:
3489 if isinstance(a, bytes):
3490 # We may get a filename encoded with 'encodeFilename'
3491 a = a.decode(encoding)
aefce8e6 3492 quoted_args.append(compat_shlex_quote(a))
28e614de 3493 return ' '.join(quoted_args)
9d4660ca
PH
3494
3495
3496def smuggle_url(url, data):
3497 """ Pass additional data in a URL for internal use. """
3498
81953d1a
RA
3499 url, idata = unsmuggle_url(url, {})
3500 data.update(idata)
15707c7e 3501 sdata = compat_urllib_parse_urlencode(
28e614de
PH
3502 {'__youtubedl_smuggle': json.dumps(data)})
3503 return url + '#' + sdata
9d4660ca
PH
3504
3505
79f82953 3506def unsmuggle_url(smug_url, default=None):
83e865a3 3507 if '#__youtubedl_smuggle' not in smug_url:
79f82953 3508 return smug_url, default
28e614de
PH
3509 url, _, sdata = smug_url.rpartition('#')
3510 jsond = compat_parse_qs(sdata)['__youtubedl_smuggle'][0]
9d4660ca
PH
3511 data = json.loads(jsond)
3512 return url, data
02dbf93f
PH
3513
3514
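How smuggle_url()/unsmuggle_url() round-trip extractor-internal data through a URL fragment; the URL and payload below are made up and the import path is assumed to be yt_dlp.utils:

    from yt_dlp.utils import smuggle_url, unsmuggle_url

    url = smuggle_url('https://example.com/watch?v=abc', {'referer': 'https://example.com/'})
    unsmuggle_url(url)
    # ('https://example.com/watch?v=abc', {'referer': 'https://example.com/'})
    unsmuggle_url('https://example.com/plain', default={})
    # ('https://example.com/plain', {})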
02dbf93f
PH
3515def format_bytes(bytes):
3516 if bytes is None:
28e614de 3517 return 'N/A'
02dbf93f
PH
3518 if type(bytes) is str:
3519 bytes = float(bytes)
3520 if bytes == 0.0:
3521 exponent = 0
3522 else:
3523 exponent = int(math.log(bytes, 1024.0))
28e614de 3524 suffix = ['B', 'KiB', 'MiB', 'GiB', 'TiB', 'PiB', 'EiB', 'ZiB', 'YiB'][exponent]
02dbf93f 3525 converted = float(bytes) / float(1024 ** exponent)
28e614de 3526 return '%.2f%s' % (converted, suffix)
f53c966a 3527
1c088fa8 3528
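format_bytes() picks a binary (1024-based) suffix; a few hedged examples:

    from yt_dlp.utils import format_bytes

    format_bytes(None)      # 'N/A'
    format_bytes(0)         # '0.00B'
    format_bytes(1536)      # '1.50KiB'
    format_bytes(10 ** 9)   # '953.67MiB'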
fb47597b
S
3529def lookup_unit_table(unit_table, s):
3530 units_re = '|'.join(re.escape(u) for u in unit_table)
3531 m = re.match(
782b1b5b 3532 r'(?P<num>[0-9]+(?:[,.][0-9]*)?)\s*(?P<unit>%s)\b' % units_re, s)
fb47597b
S
3533 if not m:
3534 return None
3535 num_str = m.group('num').replace(',', '.')
3536 mult = unit_table[m.group('unit')]
3537 return int(float(num_str) * mult)
3538
3539
be64b5b0
PH
3540def parse_filesize(s):
3541 if s is None:
3542 return None
3543
dfb1b146 3544 # The lower-case forms are of course incorrect and unofficial,
be64b5b0
PH
3545 # but we support those too
3546 _UNIT_TABLE = {
3547 'B': 1,
3548 'b': 1,
70852b47 3549 'bytes': 1,
be64b5b0
PH
3550 'KiB': 1024,
3551 'KB': 1000,
3552 'kB': 1024,
3553 'Kb': 1000,
13585d76 3554 'kb': 1000,
70852b47
YCH
3555 'kilobytes': 1000,
3556 'kibibytes': 1024,
be64b5b0
PH
3557 'MiB': 1024 ** 2,
3558 'MB': 1000 ** 2,
3559 'mB': 1024 ** 2,
3560 'Mb': 1000 ** 2,
13585d76 3561 'mb': 1000 ** 2,
70852b47
YCH
3562 'megabytes': 1000 ** 2,
3563 'mebibytes': 1024 ** 2,
be64b5b0
PH
3564 'GiB': 1024 ** 3,
3565 'GB': 1000 ** 3,
3566 'gB': 1024 ** 3,
3567 'Gb': 1000 ** 3,
13585d76 3568 'gb': 1000 ** 3,
70852b47
YCH
3569 'gigabytes': 1000 ** 3,
3570 'gibibytes': 1024 ** 3,
be64b5b0
PH
3571 'TiB': 1024 ** 4,
3572 'TB': 1000 ** 4,
3573 'tB': 1024 ** 4,
3574 'Tb': 1000 ** 4,
13585d76 3575 'tb': 1000 ** 4,
70852b47
YCH
3576 'terabytes': 1000 ** 4,
3577 'tebibytes': 1024 ** 4,
be64b5b0
PH
3578 'PiB': 1024 ** 5,
3579 'PB': 1000 ** 5,
3580 'pB': 1024 ** 5,
3581 'Pb': 1000 ** 5,
13585d76 3582 'pb': 1000 ** 5,
70852b47
YCH
3583 'petabytes': 1000 ** 5,
3584 'pebibytes': 1024 ** 5,
be64b5b0
PH
3585 'EiB': 1024 ** 6,
3586 'EB': 1000 ** 6,
3587 'eB': 1024 ** 6,
3588 'Eb': 1000 ** 6,
13585d76 3589 'eb': 1000 ** 6,
70852b47
YCH
3590 'exabytes': 1000 ** 6,
3591 'exbibytes': 1024 ** 6,
be64b5b0
PH
3592 'ZiB': 1024 ** 7,
3593 'ZB': 1000 ** 7,
3594 'zB': 1024 ** 7,
3595 'Zb': 1000 ** 7,
13585d76 3596 'zb': 1000 ** 7,
70852b47
YCH
3597 'zettabytes': 1000 ** 7,
3598 'zebibytes': 1024 ** 7,
be64b5b0
PH
3599 'YiB': 1024 ** 8,
3600 'YB': 1000 ** 8,
3601 'yB': 1024 ** 8,
3602 'Yb': 1000 ** 8,
13585d76 3603 'yb': 1000 ** 8,
70852b47
YCH
3604 'yottabytes': 1000 ** 8,
3605 'yobibytes': 1024 ** 8,
be64b5b0
PH
3606 }
3607
fb47597b
S
3608 return lookup_unit_table(_UNIT_TABLE, s)
3609
3610
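A sketch of parse_filesize(), which returns a byte count and honours both SI and binary suffixes via the table above (illustrative inputs):

    from yt_dlp.utils import parse_filesize

    parse_filesize('500 KB')   # 500000
    parse_filesize('5 MiB')    # 5242880
    parse_filesize('1.5GiB')   # 1610612736
    parse_filesize('huge')     # None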
3611def parse_count(s):
3612 if s is None:
be64b5b0
PH
3613 return None
3614
fb47597b
S
3615 s = s.strip()
3616
3617 if re.match(r'^[\d,.]+$', s):
3618 return str_to_int(s)
3619
3620 _UNIT_TABLE = {
3621 'k': 1000,
3622 'K': 1000,
3623 'm': 1000 ** 2,
3624 'M': 1000 ** 2,
3625 'kk': 1000 ** 2,
3626 'KK': 1000 ** 2,
3627 }
be64b5b0 3628
fb47597b 3629 return lookup_unit_table(_UNIT_TABLE, s)
be64b5b0 3630
2f7ae819 3631
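parse_count() normalises abbreviated view and like counts; illustrative values:

    from yt_dlp.utils import parse_count

    parse_count('1,480')   # 1480
    parse_count('15k')     # 15000
    parse_count('10M')     # 10000000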
b871d7e9
S
3632def parse_resolution(s):
3633 if s is None:
3634 return {}
3635
3636 mobj = re.search(r'\b(?P<w>\d+)\s*[xX×]\s*(?P<h>\d+)\b', s)
3637 if mobj:
3638 return {
3639 'width': int(mobj.group('w')),
3640 'height': int(mobj.group('h')),
3641 }
3642
3643 mobj = re.search(r'\b(\d+)[pPiI]\b', s)
3644 if mobj:
3645 return {'height': int(mobj.group(1))}
3646
3647 mobj = re.search(r'\b([48])[kK]\b', s)
3648 if mobj:
3649 return {'height': int(mobj.group(1)) * 540}
3650
3651 return {}
3652
3653
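A short parse_resolution() sketch with made-up strings:

    from yt_dlp.utils import parse_resolution

    parse_resolution('1920x1080')   # {'width': 1920, 'height': 1080}
    parse_resolution('720p')        # {'height': 720}
    parse_resolution('4K')          # {'height': 2160}
    parse_resolution('audio only')  # {}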
0dc41787
S
3654def parse_bitrate(s):
3655 if not isinstance(s, compat_str):
3656 return
3657 mobj = re.search(r'\b(\d+)\s*kbps', s)
3658 if mobj:
3659 return int(mobj.group(1))
3660
3661
a942d6cb 3662def month_by_name(name, lang='en'):
caefb1de
PH
3663 """ Return the number of a month by (locale-independently) English name """
3664
f6717dec 3665 month_names = MONTH_NAMES.get(lang, MONTH_NAMES['en'])
a942d6cb 3666
caefb1de 3667 try:
f6717dec 3668 return month_names.index(name) + 1
7105440c
YCH
3669 except ValueError:
3670 return None
3671
3672
3673def month_by_abbreviation(abbrev):
3674 """ Return the number of a month by (locale-independently) English
3675 abbreviations """
3676
3677 try:
3678 return [s[:3] for s in ENGLISH_MONTH_NAMES].index(abbrev) + 1
caefb1de
PH
3679 except ValueError:
3680 return None
18258362
JMF
3681
3682
5aafe895 3683def fix_xml_ampersands(xml_str):
18258362 3684 """Replace all the '&' by '&amp;' in XML"""
5aafe895
PH
3685 return re.sub(
3686 r'&(?!amp;|lt;|gt;|apos;|quot;|#x[0-9a-fA-F]{,4};|#[0-9]{,4};)',
28e614de 3687 '&amp;',
5aafe895 3688 xml_str)
e3946f98
PH
3689
3690
3691def setproctitle(title):
8bf48f23 3692 assert isinstance(title, compat_str)
c1c05c67
YCH
3693
3694 # ctypes in Jython is not complete
3695 # http://bugs.jython.org/issue2148
3696 if sys.platform.startswith('java'):
3697 return
3698
e3946f98 3699 try:
611c1dd9 3700 libc = ctypes.cdll.LoadLibrary('libc.so.6')
e3946f98
PH
3701 except OSError:
3702 return
2f49bcd6
RC
3703 except TypeError:
3704 # LoadLibrary in Windows Python 2.7.13 only expects
3705 # a bytestring, but since unicode_literals turns
3706 # every string into a unicode string, it fails.
3707 return
6eefe533
PH
3708 title_bytes = title.encode('utf-8')
3709 buf = ctypes.create_string_buffer(len(title_bytes))
3710 buf.value = title_bytes
e3946f98 3711 try:
6eefe533 3712 libc.prctl(15, buf, 0, 0, 0)
e3946f98
PH
3713 except AttributeError:
3714 return # Strange libc, just skip this
d7dda168
PH
3715
3716
3717def remove_start(s, start):
46bc9b7d 3718 return s[len(start):] if s is not None and s.startswith(start) else s
29eb5174
PH
3719
3720
2b9faf55 3721def remove_end(s, end):
46bc9b7d 3722 return s[:-len(end)] if s is not None and s.endswith(end) else s
2b9faf55
PH
3723
3724
31b2051e
S
3725def remove_quotes(s):
3726 if s is None or len(s) < 2:
3727 return s
3728 for quote in ('"', "'", ):
3729 if s[0] == quote and s[-1] == quote:
3730 return s[1:-1]
3731 return s
3732
3733
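Hedged examples for the small string helpers above, with hypothetical inputs:

    from yt_dlp.utils import remove_start, remove_end, remove_quotes

    remove_start('yt:spotlight', 'yt:')   # 'spotlight'
    remove_start('spotlight', 'yt:')      # 'spotlight' (prefix absent, unchanged)
    remove_end('chapter 1.mp4', '.mp4')   # 'chapter 1'
    remove_quotes('"quoted"')             # 'quoted'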
b6e0c7d2
U
3734def get_domain(url):
3735 domain = re.match(r'(?:https?:\/\/)?(?:www\.)?(?P<domain>[^\n\/]+\.[^\n\/]+)(?:\/(.*))?', url)
3736 return domain.group('domain') if domain else None
3737
3738
29eb5174 3739def url_basename(url):
9b8aaeed 3740 path = compat_urlparse.urlparse(url).path
28e614de 3741 return path.strip('/').split('/')[-1]
aa94a6d3
PH
3742
3743
02dc0a36
S
3744def base_url(url):
3745 return re.match(r'https?://[^?#&]+/', url).group()
3746
3747
e34c3361 3748def urljoin(base, path):
4b5de77b
S
3749 if isinstance(path, bytes):
3750 path = path.decode('utf-8')
e34c3361
S
3751 if not isinstance(path, compat_str) or not path:
3752 return None
fad4ceb5 3753 if re.match(r'^(?:[a-zA-Z][a-zA-Z0-9+-.]*:)?//', path):
e34c3361 3754 return path
4b5de77b
S
3755 if isinstance(base, bytes):
3756 base = base.decode('utf-8')
3757 if not isinstance(base, compat_str) or not re.match(
3758 r'^(?:https?:)?//', base):
e34c3361
S
3759 return None
3760 return compat_urlparse.urljoin(base, path)
3761
3762
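Illustrative calls to the URL helpers above; the example.com URLs are placeholders:

    from yt_dlp.utils import url_basename, base_url, urljoin

    url_basename('https://example.com/a/b/c.mp4?x=1')   # 'c.mp4'
    base_url('https://example.com/a/b/c.mp4?x=1')       # 'https://example.com/a/b/'
    urljoin('https://example.com/a/', 'b/c.mp4')         # 'https://example.com/a/b/c.mp4'
    urljoin('https://example.com/a/', '//cdn.example.com/c.mp4')  # '//cdn.example.com/c.mp4'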
aa94a6d3
PH
3763class HEADRequest(compat_urllib_request.Request):
3764 def get_method(self):
611c1dd9 3765 return 'HEAD'
7217e148
PH
3766
3767
95cf60e8
S
3768class PUTRequest(compat_urllib_request.Request):
3769 def get_method(self):
3770 return 'PUT'
3771
3772
9732d77e 3773def int_or_none(v, scale=1, default=None, get_attr=None, invscale=1):
28746fbd
PH
3774 if get_attr:
3775 if v is not None:
3776 v = getattr(v, get_attr, None)
9572013d
PH
3777 if v == '':
3778 v = None
1812afb7
S
3779 if v is None:
3780 return default
3781 try:
3782 return int(v) * invscale // scale
5e1271c5 3783 except (ValueError, TypeError):
af98f8ff 3784 return default
9732d77e 3785
9572013d 3786
40a90862
JMF
3787def str_or_none(v, default=None):
3788 return default if v is None else compat_str(v)
3789
9732d77e
PH
3790
3791def str_to_int(int_str):
48d4681e 3792 """ A more relaxed version of int_or_none """
42db58ec 3793 if isinstance(int_str, compat_integer_types):
348c6bf1 3794 return int_str
42db58ec
S
3795 elif isinstance(int_str, compat_str):
3796 int_str = re.sub(r'[,\.\+]', '', int_str)
3797 return int_or_none(int_str)
608d11f5
PH
3798
3799
9732d77e 3800def float_or_none(v, scale=1, invscale=1, default=None):
caf80631
S
3801 if v is None:
3802 return default
3803 try:
3804 return float(v) * invscale / scale
5e1271c5 3805 except (ValueError, TypeError):
caf80631 3806 return default
43f775e4
PH
3807
3808
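The *_or_none coercion helpers never raise on bad input; a few hedged examples with arbitrary values:

    from yt_dlp.utils import int_or_none, str_to_int, float_or_none

    int_or_none('1080')              # 1080
    int_or_none('', default=-1)      # -1 (empty string is treated as missing)
    int_or_none('2500', scale=1000)  # 2
    str_to_int('1,234,567')          # 1234567
    float_or_none('2.5')             # 2.5
    float_or_none('n/a', default=0)  # 0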
c7e327c4
S
3809def bool_or_none(v, default=None):
3810 return v if isinstance(v, bool) else default
3811
3812
53cd37ba
S
3813def strip_or_none(v, default=None):
3814 return v.strip() if isinstance(v, compat_str) else default
b72b4431
S
3815
3816
af03000a
S
3817def url_or_none(url):
3818 if not url or not isinstance(url, compat_str):
3819 return None
3820 url = url.strip()
29f7c58a 3821 return url if re.match(r'^(?:(?:https?|rt(?:m(?:pt?[es]?|fp)|sp[su]?)|mms|ftps?):)?//', url) else None
af03000a
S
3822
3823
e29663c6 3824def strftime_or_none(timestamp, date_format, default=None):
3825 datetime_object = None
3826 try:
3827 if isinstance(timestamp, compat_numeric_types): # unix timestamp
3828 datetime_object = datetime.datetime.utcfromtimestamp(timestamp)
3829 elif isinstance(timestamp, compat_str): # assume YYYYMMDD
3830 datetime_object = datetime.datetime.strptime(timestamp, '%Y%m%d')
3831 return datetime_object.strftime(date_format)
3832 except (ValueError, TypeError, AttributeError):
3833 return default
3834
3835
608d11f5 3836def parse_duration(s):
8f9312c3 3837 if not isinstance(s, compat_basestring):
608d11f5
PH
3838 return None
3839
ca7b3246
S
3840 s = s.strip()
3841
acaff495 3842 days, hours, mins, secs, ms = [None] * 5
15846398 3843 m = re.match(r'(?:(?:(?:(?P<days>[0-9]+):)?(?P<hours>[0-9]+):)?(?P<mins>[0-9]+):)?(?P<secs>[0-9]+)(?P<ms>\.[0-9]+)?Z?$', s)
acaff495 3844 if m:
3845 days, hours, mins, secs, ms = m.groups()
3846 else:
3847 m = re.match(
056653bb
S
3848 r'''(?ix)(?:P?
3849 (?:
3850 [0-9]+\s*y(?:ears?)?\s*
3851 )?
3852 (?:
3853 [0-9]+\s*m(?:onths?)?\s*
3854 )?
3855 (?:
3856 [0-9]+\s*w(?:eeks?)?\s*
3857 )?
8f4b58d7 3858 (?:
acaff495 3859 (?P<days>[0-9]+)\s*d(?:ays?)?\s*
8f4b58d7 3860 )?
056653bb 3861 T)?
acaff495 3862 (?:
3863 (?P<hours>[0-9]+)\s*h(?:ours?)?\s*
3864 )?
3865 (?:
3866 (?P<mins>[0-9]+)\s*m(?:in(?:ute)?s?)?\s*
3867 )?
3868 (?:
3869 (?P<secs>[0-9]+)(?P<ms>\.[0-9]+)?\s*s(?:ec(?:ond)?s?)?\s*
15846398 3870 )?Z?$''', s)
acaff495 3871 if m:
3872 days, hours, mins, secs, ms = m.groups()
3873 else:
15846398 3874 m = re.match(r'(?i)(?:(?P<hours>[0-9.]+)\s*(?:hours?)|(?P<mins>[0-9.]+)\s*(?:mins?\.?|minutes?)\s*)Z?$', s)
acaff495 3875 if m:
3876 hours, mins = m.groups()
3877 else:
3878 return None
3879
3880 duration = 0
3881 if secs:
3882 duration += float(secs)
3883 if mins:
3884 duration += float(mins) * 60
3885 if hours:
3886 duration += float(hours) * 60 * 60
3887 if days:
3888 duration += float(days) * 24 * 60 * 60
3889 if ms:
3890 duration += float(ms)
3891 return duration
91d7d0b3
JMF
3892
3893
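parse_duration() accepts clock-style strings, loose unit suffixes and ISO 8601 durations, returning a number of seconds; hedged examples:

    from yt_dlp.utils import parse_duration

    parse_duration('1:02:03.05')  # 3723.05
    parse_duration('3h11m53s')    # 11513.0
    parse_duration('5 min')       # 300.0
    parse_duration('PT1H30M')     # 5400.0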
e65e4c88 3894def prepend_extension(filename, ext, expected_real_ext=None):
5f6a1245 3895 name, real_ext = os.path.splitext(filename)
e65e4c88
S
3896 return (
3897 '{0}.{1}{2}'.format(name, ext, real_ext)
3898 if not expected_real_ext or real_ext[1:] == expected_real_ext
3899 else '{0}.{1}'.format(filename, ext))
d70ad093
PH
3900
3901
b3ed15b7
S
3902def replace_extension(filename, ext, expected_real_ext=None):
3903 name, real_ext = os.path.splitext(filename)
3904 return '{0}.{1}'.format(
3905 name if not expected_real_ext or real_ext[1:] == expected_real_ext else filename,
3906 ext)
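Sketch of the two extension helpers above (filenames are arbitrary examples):

# Illustrative sketch only; not part of the original module
from yt_dlp.utils import prepend_extension, replace_extension

assert prepend_extension('video.mp4', 'temp') == 'video.temp.mp4'
assert prepend_extension('video.mp4', 'temp', expected_real_ext='mkv') == 'video.mp4.temp'
assert replace_extension('video.mp4', 'mkv') == 'video.mkv'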
3907
3908
d70ad093
PH
3909def check_executable(exe, args=[]):
3910 """ Checks if the given binary is installed somewhere in PATH, and returns its name.
3911 args can be a list of arguments for a short output (like -version) """
3912 try:
f5b1bca9 3913 process_communicate_or_kill(subprocess.Popen(
3914 [exe] + args, stdout=subprocess.PIPE, stderr=subprocess.PIPE))
d70ad093
PH
3915 except OSError:
3916 return False
3917 return exe
b7ab0590
PH
3918
3919
95807118 3920def get_exe_version(exe, args=['--version'],
cae97f65 3921 version_re=None, unrecognized='present'):
95807118
PH
3922 """ Returns the version of the specified executable,
3923 or False if the executable is not present """
3924 try:
b64d04c1 3925 # STDIN should be redirected too. On UNIX-like systems, ffmpeg triggers
7a5c1cfe 3926 # SIGTTOU if yt-dlp is run in the background.
067aa17e 3927 # See https://github.com/ytdl-org/youtube-dl/issues/955#issuecomment-209789656
f5b1bca9 3928 out, _ = process_communicate_or_kill(subprocess.Popen(
54116803 3929 [encodeArgument(exe)] + args,
00ca7552 3930 stdin=subprocess.PIPE,
f5b1bca9 3931 stdout=subprocess.PIPE, stderr=subprocess.STDOUT))
95807118
PH
3932 except OSError:
3933 return False
cae97f65
PH
3934 if isinstance(out, bytes): # Python 2.x
3935 out = out.decode('ascii', 'ignore')
3936 return detect_exe_version(out, version_re, unrecognized)
3937
3938
3939def detect_exe_version(output, version_re=None, unrecognized='present'):
3940 assert isinstance(output, compat_str)
3941 if version_re is None:
3942 version_re = r'version\s+([-0-9._a-zA-Z]+)'
3943 m = re.search(version_re, output)
95807118
PH
3944 if m:
3945 return m.group(1)
3946 else:
3947 return unrecognized
3948
3949
483336e7 3950class LazyList(collections.Sequence):
3951 ''' Lazy immutable list from an iterable
 3952 Note that slices of a LazyList are lists and not LazyLists'''
3953
3954 def __init__(self, iterable):
3955 self.__iterable = iter(iterable)
3956 self.__cache = []
3957
3958 def __iter__(self):
3959 for item in self.__cache:
3960 yield item
3961 for item in self.__iterable:
3962 self.__cache.append(item)
3963 yield item
3964
3965 def exhaust(self):
3966 ''' Evaluate the entire iterable '''
3967 self.__cache.extend(self.__iterable)
3968
3969 def __getitem__(self, idx):
3970 if isinstance(idx, slice):
3971 step = idx.step or 1
3972 start = idx.start if idx.start is not None else 1 if step > 0 else -1
3973 stop = idx.stop if idx.stop is not None else -1 if step > 0 else 0
3974 elif isinstance(idx, int):
3975 start = stop = idx
3976 else:
3977 raise TypeError('indices must be integers or slices')
3978 if start < 0 or stop < 0:
3979 # We need to consume the entire iterable to be able to slice from the end
3980 # Obviously, never use this with infinite iterables
3981 self.exhaust()
3982 else:
3983 n = max(start, stop) - len(self.__cache) + 1
3984 if n > 0:
3985 self.__cache.extend(itertools.islice(self.__iterable, n))
3986 return self.__cache[idx]
3987
3988 def __bool__(self):
3989 try:
3990 self[0]
3991 except IndexError:
3992 return False
3993 return True
3994
3995 def __len__(self):
3996 self.exhaust()
3997 return len(self.__cache)
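Sketch of LazyList's lazy caching behaviour (assumes the class is importable from yt_dlp.utils):

# Illustrative sketch only; not part of the original module
from yt_dlp.utils import LazyList

squares = LazyList(x * x for x in range(10))
assert squares[3] == 9               # consumes (and caches) only the first four items
assert squares[2:5] == [4, 9, 16]    # slices are plain lists, not LazyLists
assert len(squares) == 10            # forces full evaluation of the iterable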
3998
3999
b7ab0590 4000class PagedList(object):
dd26ced1
PH
4001 def __len__(self):
4002 # This is only useful for tests
4003 return len(self.getslice())
4004
55575225 4005 def getslice(self, start, end):
4006 raise NotImplementedError('This method must be implemented by subclasses')
4007
4008 def __getitem__(self, idx):
4009 if not isinstance(idx, int) or idx < 0:
4010 raise TypeError('indices must be non-negative integers')
4011 entries = self.getslice(idx, idx + 1)
4012 return entries[0] if entries else None
4013
9c44d242
PH
4014
4015class OnDemandPagedList(PagedList):
6be08ce6 4016 def __init__(self, pagefunc, pagesize, use_cache=True):
9c44d242
PH
4017 self._pagefunc = pagefunc
4018 self._pagesize = pagesize
b95dc034
YCH
4019 self._use_cache = use_cache
4020 if use_cache:
4021 self._cache = {}
9c44d242 4022
b7ab0590
PH
4023 def getslice(self, start=0, end=None):
4024 res = []
4025 for pagenum in itertools.count(start // self._pagesize):
4026 firstid = pagenum * self._pagesize
4027 nextfirstid = pagenum * self._pagesize + self._pagesize
4028 if start >= nextfirstid:
4029 continue
4030
b95dc034
YCH
4031 page_results = None
4032 if self._use_cache:
4033 page_results = self._cache.get(pagenum)
4034 if page_results is None:
4035 page_results = list(self._pagefunc(pagenum))
4036 if self._use_cache:
4037 self._cache[pagenum] = page_results
b7ab0590
PH
4038
4039 startv = (
4040 start % self._pagesize
4041 if firstid <= start < nextfirstid
4042 else 0)
4043
4044 endv = (
4045 ((end - 1) % self._pagesize) + 1
4046 if (end is not None and firstid <= end <= nextfirstid)
4047 else None)
4048
4049 if startv != 0 or endv is not None:
4050 page_results = page_results[startv:endv]
4051 res.extend(page_results)
4052
 4053 # A little optimization - if the current page is not "full", i.e. does
4054 # not contain page_size videos then we can assume that this page
4055 # is the last one - there are no more ids on further pages -
4056 # i.e. no need to query again.
4057 if len(page_results) + startv < self._pagesize:
4058 break
4059
4060 # If we got the whole page, but the next page is not interesting,
4061 # break out early as well
4062 if end == nextfirstid:
4063 break
4064 return res
81c2f20b
PH
4065
4066
9c44d242
PH
4067class InAdvancePagedList(PagedList):
4068 def __init__(self, pagefunc, pagecount, pagesize):
4069 self._pagefunc = pagefunc
4070 self._pagecount = pagecount
4071 self._pagesize = pagesize
4072
4073 def getslice(self, start=0, end=None):
4074 res = []
4075 start_page = start // self._pagesize
4076 end_page = (
4077 self._pagecount if end is None else (end // self._pagesize + 1))
4078 skip_elems = start - start_page * self._pagesize
4079 only_more = None if end is None else end - start
4080 for pagenum in range(start_page, end_page):
4081 page = list(self._pagefunc(pagenum))
4082 if skip_elems:
4083 page = page[skip_elems:]
4084 skip_elems = None
4085 if only_more is not None:
4086 if len(page) < only_more:
4087 only_more -= len(page)
4088 else:
4089 page = page[:only_more]
4090 res.extend(page)
4091 break
4092 res.extend(page)
4093 return res
4094
4095
81c2f20b 4096def uppercase_escape(s):
676eb3f2 4097 unicode_escape = codecs.getdecoder('unicode_escape')
81c2f20b 4098 return re.sub(
a612753d 4099 r'\\U[0-9a-fA-F]{8}',
676eb3f2
PH
4100 lambda m: unicode_escape(m.group(0))[0],
4101 s)
0fe2ff78
YCH
4102
4103
4104def lowercase_escape(s):
4105 unicode_escape = codecs.getdecoder('unicode_escape')
4106 return re.sub(
4107 r'\\u[0-9a-fA-F]{4}',
4108 lambda m: unicode_escape(m.group(0))[0],
4109 s)
b53466e1 4110
d05cfe06
S
4111
4112def escape_rfc3986(s):
4113 """Escape non-ASCII characters as suggested by RFC 3986"""
8f9312c3 4114 if sys.version_info < (3, 0) and isinstance(s, compat_str):
d05cfe06 4115 s = s.encode('utf-8')
ecc0c5ee 4116 return compat_urllib_parse.quote(s, b"%/;:@&=+$,!~*'()?#[]")
d05cfe06
S
4117
4118
4119def escape_url(url):
4120 """Escape URL as suggested by RFC 3986"""
4121 url_parsed = compat_urllib_parse_urlparse(url)
4122 return url_parsed._replace(
efbed08d 4123 netloc=url_parsed.netloc.encode('idna').decode('ascii'),
d05cfe06
S
4124 path=escape_rfc3986(url_parsed.path),
4125 params=escape_rfc3986(url_parsed.params),
4126 query=escape_rfc3986(url_parsed.query),
4127 fragment=escape_rfc3986(url_parsed.fragment)
4128 ).geturl()
4129
62e609ab
PH
4130
4131def read_batch_urls(batch_fd):
4132 def fixup(url):
4133 if not isinstance(url, compat_str):
4134 url = url.decode('utf-8', 'replace')
8c04f0be 4135 BOM_UTF8 = ('\xef\xbb\xbf', '\ufeff')
4136 for bom in BOM_UTF8:
4137 if url.startswith(bom):
4138 url = url[len(bom):]
4139 url = url.lstrip()
4140 if not url or url.startswith(('#', ';', ']')):
62e609ab 4141 return False
8c04f0be 4142 # "#" cannot be stripped out since it is part of the URI
 4143 # However, it can be safely stripped out if it follows a whitespace
4144 return re.split(r'\s#', url, 1)[0].rstrip()
62e609ab
PH
4145
4146 with contextlib.closing(batch_fd) as fd:
4147 return [url for url in map(fixup, fd) if url]
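Sketch of read_batch_urls with an in-memory file (the URLs are placeholders):

# Illustrative sketch only; not part of the original module
import io
from yt_dlp.utils import read_batch_urls

batch = io.StringIO('# a comment\nhttps://example.com/a\n\nhttps://example.com/b  # trailing note\n')
assert read_batch_urls(batch) == ['https://example.com/a', 'https://example.com/b']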
b74fa8cd
JMF
4148
4149
4150def urlencode_postdata(*args, **kargs):
15707c7e 4151 return compat_urllib_parse_urlencode(*args, **kargs).encode('ascii')
bcf89ce6
PH
4152
4153
38f9ef31 4154def update_url_query(url, query):
cacd9966
YCH
4155 if not query:
4156 return url
38f9ef31 4157 parsed_url = compat_urlparse.urlparse(url)
4158 qs = compat_parse_qs(parsed_url.query)
4159 qs.update(query)
4160 return compat_urlparse.urlunparse(parsed_url._replace(
15707c7e 4161 query=compat_urllib_parse_urlencode(qs, True)))
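Sketch of update_url_query overriding an existing parameter and adding a new one (the URL is arbitrary):

# Illustrative sketch only; not part of the original module
from yt_dlp.utils import update_url_query
from yt_dlp.compat import compat_parse_qs, compat_urlparse

new_url = update_url_query('https://example.com/api?page=1', {'page': 2, 'count': 10})
assert compat_parse_qs(compat_urlparse.urlparse(new_url).query) == {'page': ['2'], 'count': ['10']}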
16392824 4162
8e60dc75 4163
ed0291d1
S
4164def update_Request(req, url=None, data=None, headers={}, query={}):
4165 req_headers = req.headers.copy()
4166 req_headers.update(headers)
4167 req_data = data or req.data
4168 req_url = update_url_query(url or req.get_full_url(), query)
95cf60e8
S
4169 req_get_method = req.get_method()
4170 if req_get_method == 'HEAD':
4171 req_type = HEADRequest
4172 elif req_get_method == 'PUT':
4173 req_type = PUTRequest
4174 else:
4175 req_type = compat_urllib_request.Request
ed0291d1
S
4176 new_req = req_type(
4177 req_url, data=req_data, headers=req_headers,
4178 origin_req_host=req.origin_req_host, unverifiable=req.unverifiable)
4179 if hasattr(req, 'timeout'):
4180 new_req.timeout = req.timeout
4181 return new_req
4182
4183
10c87c15 4184def _multipart_encode_impl(data, boundary):
0c265486
YCH
4185 content_type = 'multipart/form-data; boundary=%s' % boundary
4186
4187 out = b''
4188 for k, v in data.items():
4189 out += b'--' + boundary.encode('ascii') + b'\r\n'
4190 if isinstance(k, compat_str):
4191 k = k.encode('utf-8')
4192 if isinstance(v, compat_str):
4193 v = v.encode('utf-8')
4194 # RFC 2047 requires non-ASCII field names to be encoded, while RFC 7578
4195 # suggests sending UTF-8 directly. Firefox sends UTF-8, too
b2ad479d 4196 content = b'Content-Disposition: form-data; name="' + k + b'"\r\n\r\n' + v + b'\r\n'
0c265486
YCH
4197 if boundary.encode('ascii') in content:
4198 raise ValueError('Boundary overlaps with data')
4199 out += content
4200
4201 out += b'--' + boundary.encode('ascii') + b'--\r\n'
4202
4203 return out, content_type
4204
4205
4206def multipart_encode(data, boundary=None):
4207 '''
4208 Encode a dict to RFC 7578-compliant form-data
4209
4210 data:
4211 A dict where keys and values can be either Unicode or bytes-like
4212 objects.
4213 boundary:
 4214 If specified, it must be a Unicode object and is used as the boundary. Otherwise
4215 a random boundary is generated.
4216
4217 Reference: https://tools.ietf.org/html/rfc7578
4218 '''
4219 has_specified_boundary = boundary is not None
4220
4221 while True:
4222 if boundary is None:
4223 boundary = '---------------' + str(random.randrange(0x0fffffff, 0xffffffff))
4224
4225 try:
10c87c15 4226 out, content_type = _multipart_encode_impl(data, boundary)
0c265486
YCH
4227 break
4228 except ValueError:
4229 if has_specified_boundary:
4230 raise
4231 boundary = None
4232
4233 return out, content_type
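Sketch of multipart_encode with a fixed boundary (field name, value and boundary are arbitrary):

# Illustrative sketch only; not part of the original module
from yt_dlp.utils import multipart_encode

body, content_type = multipart_encode({'field': 'value'}, boundary='X')
assert content_type == 'multipart/form-data; boundary=X'
assert body == b'--X\r\nContent-Disposition: form-data; name="field"\r\n\r\nvalue\r\n--X--\r\n'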
4234
4235
86296ad2 4236def dict_get(d, key_or_keys, default=None, skip_false_values=True):
cbecc9b9
S
4237 if isinstance(key_or_keys, (list, tuple)):
4238 for key in key_or_keys:
86296ad2
S
4239 if key not in d or d[key] is None or skip_false_values and not d[key]:
4240 continue
4241 return d[key]
cbecc9b9
S
4242 return default
4243 return d.get(key_or_keys, default)
4244
4245
329ca3be 4246def try_get(src, getter, expected_type=None):
a32a9a7e
S
4247 if not isinstance(getter, (list, tuple)):
4248 getter = [getter]
4249 for get in getter:
4250 try:
4251 v = get(src)
4252 except (AttributeError, KeyError, TypeError, IndexError):
4253 pass
4254 else:
4255 if expected_type is None or isinstance(v, expected_type):
4256 return v
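Sketch of dict_get and try_get on a metadata dict (the field names are arbitrary):

# Illustrative sketch only; not part of the original module
from yt_dlp.utils import dict_get, try_get

meta = {'title': '', 'fulltitle': 'Example video', 'like_count': None}
assert dict_get(meta, ('title', 'fulltitle')) == 'Example video'      # falsy values are skipped
assert try_get(meta, lambda x: x['tags'][0], str) is None             # missing keys are swallowed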
329ca3be
S
4257
4258
6cc62232
S
4259def merge_dicts(*dicts):
4260 merged = {}
4261 for a_dict in dicts:
4262 for k, v in a_dict.items():
4263 if v is None:
4264 continue
3089bc74
S
4265 if (k not in merged
4266 or (isinstance(v, compat_str) and v
4267 and isinstance(merged[k], compat_str)
4268 and not merged[k])):
6cc62232
S
4269 merged[k] = v
4270 return merged
4271
4272
8e60dc75
S
4273def encode_compat_str(string, encoding=preferredencoding(), errors='strict'):
4274 return string if isinstance(string, compat_str) else compat_str(string, encoding, errors)
4275
16392824 4276
a1a530b0
PH
4277US_RATINGS = {
4278 'G': 0,
4279 'PG': 10,
4280 'PG-13': 13,
4281 'R': 16,
4282 'NC': 18,
4283}
fac55558
PH
4284
4285
a8795327 4286TV_PARENTAL_GUIDELINES = {
5a16c9d9
RA
4287 'TV-Y': 0,
4288 'TV-Y7': 7,
4289 'TV-G': 0,
4290 'TV-PG': 0,
4291 'TV-14': 14,
4292 'TV-MA': 17,
a8795327
S
4293}
4294
4295
146c80e2 4296def parse_age_limit(s):
a8795327
S
4297 if type(s) == int:
4298 return s if 0 <= s <= 21 else None
4299 if not isinstance(s, compat_basestring):
d838b1bd 4300 return None
146c80e2 4301 m = re.match(r'^(?P<age>\d{1,2})\+?$', s)
a8795327
S
4302 if m:
4303 return int(m.group('age'))
5c5fae6d 4304 s = s.upper()
a8795327
S
4305 if s in US_RATINGS:
4306 return US_RATINGS[s]
5a16c9d9 4307 m = re.match(r'^TV[_-]?(%s)$' % '|'.join(k[3:] for k in TV_PARENTAL_GUIDELINES), s)
b8361187 4308 if m:
5a16c9d9 4309 return TV_PARENTAL_GUIDELINES['TV-' + m.group(1)]
b8361187 4310 return None
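Sketch of the rating formats parse_age_limit understands (sample strings are arbitrary):

# Illustrative sketch only; not part of the original module
from yt_dlp.utils import parse_age_limit

assert parse_age_limit('18+') == 18
assert parse_age_limit('PG-13') == 13
assert parse_age_limit('TV-MA') == 17
assert parse_age_limit('not a rating') is None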
146c80e2
S
4311
4312
fac55558 4313def strip_jsonp(code):
609a61e3 4314 return re.sub(
5552c9eb 4315 r'''(?sx)^
e9c671d5 4316 (?:window\.)?(?P<func_name>[a-zA-Z0-9_.$]*)
5552c9eb
YCH
4317 (?:\s*&&\s*(?P=func_name))?
4318 \s*\(\s*(?P<callback_data>.*)\);?
4319 \s*?(?://[^\n]*)*$''',
4320 r'\g<callback_data>', code)
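Sketch of strip_jsonp unwrapping a JSONP payload (the callback name and payload are arbitrary):

# Illustrative sketch only; not part of the original module
import json
from yt_dlp.utils import strip_jsonp

assert json.loads(strip_jsonp('callback({"status": "ok"});')) == {'status': 'ok'}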
478c2c61
PH
4321
4322
5c610515 4323def js_to_json(code, vars={}):
4324 # vars is a dict of var, val pairs to substitute
4195096e
S
4325 COMMENT_RE = r'/\*(?:(?!\*/).)*?\*/|//[^\n]*'
4326 SKIP_RE = r'\s*(?:{comment})?\s*'.format(comment=COMMENT_RE)
4327 INTEGER_TABLE = (
4328 (r'(?s)^(0[xX][0-9a-fA-F]+){skip}:?$'.format(skip=SKIP_RE), 16),
4329 (r'(?s)^(0+[0-7]+){skip}:?$'.format(skip=SKIP_RE), 8),
4330 )
4331
e05f6939 4332 def fix_kv(m):
e7b6d122
PH
4333 v = m.group(0)
4334 if v in ('true', 'false', 'null'):
4335 return v
8bdd16b4 4336 elif v.startswith('/*') or v.startswith('//') or v.startswith('!') or v == ',':
bd1e4844 4337 return ""
4338
4339 if v[0] in ("'", '"'):
4340 v = re.sub(r'(?s)\\.|"', lambda m: {
e7b6d122 4341 '"': '\\"',
bd1e4844 4342 "\\'": "'",
4343 '\\\n': '',
4344 '\\x': '\\u00',
4345 }.get(m.group(0), m.group(0)), v[1:-1])
8bdd16b4 4346 else:
4347 for regex, base in INTEGER_TABLE:
4348 im = re.match(regex, v)
4349 if im:
4350 i = int(im.group(1), base)
4351 return '"%d":' % i if v.endswith(':') else '%d' % i
89ac4a19 4352
5c610515 4353 if v in vars:
4354 return vars[v]
4355
e7b6d122 4356 return '"%s"' % v
e05f6939 4357
bd1e4844 4358 return re.sub(r'''(?sx)
4359 "(?:[^"\\]*(?:\\\\|\\['"nurtbfx/\n]))*[^"\\]*"|
4360 '(?:[^'\\]*(?:\\\\|\\['"nurtbfx/\n]))*[^'\\]*'|
4195096e 4361 {comment}|,(?={skip}[\]}}])|
c384d537 4362 (?:(?<![0-9])[eE]|[a-df-zA-DF-Z_])[.a-zA-Z_0-9]*|
4195096e 4363 \b(?:0[xX][0-9a-fA-F]+|0+[0-7]+)(?:{skip}:)?|
8bdd16b4 4364 [0-9]+(?={skip}:)|
4365 !+
4195096e 4366 '''.format(comment=COMMENT_RE, skip=SKIP_RE), fix_kv, code)
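Sketch of js_to_json turning a JavaScript object literal into parseable JSON (the literal is arbitrary):

# Illustrative sketch only; not part of the original module
import json
from yt_dlp.utils import js_to_json

js = "{foo: 'bar', count: 0x10, /* comment */ trailing: 'comma',}"
assert json.loads(js_to_json(js)) == {'foo': 'bar', 'count': 16, 'trailing': 'comma'}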
e05f6939
PH
4367
4368
478c2c61
PH
4369def qualities(quality_ids):
4370 """ Get a numeric quality value out of a list of possible values """
4371 def q(qid):
4372 try:
4373 return quality_ids.index(qid)
4374 except ValueError:
4375 return -1
4376 return q
4377
acd69589 4378
de6000d9 4379DEFAULT_OUTTMPL = {
4380 'default': '%(title)s [%(id)s].%(ext)s',
72755351 4381 'chapter': '%(title)s - %(section_number)03d %(section_title)s [%(id)s].%(ext)s',
de6000d9 4382}
4383OUTTMPL_TYPES = {
72755351 4384 'chapter': None,
de6000d9 4385 'subtitle': None,
4386 'thumbnail': None,
4387 'description': 'description',
4388 'annotation': 'annotations.xml',
4389 'infojson': 'info.json',
5112f26a 4390 'pl_thumbnail': None,
de6000d9 4391 'pl_description': 'description',
4392 'pl_infojson': 'info.json',
4393}
0a871f68 4394
143db31d 4395# As of [1] format syntax is:
4396# %[mapping_key][conversion_flags][minimum_width][.precision][length_modifier]type
4397# 1. https://docs.python.org/2/library/stdtypes.html#string-formatting
752cda38 4398STR_FORMAT_RE = r'''(?x)
143db31d 4399 (?<!%)
4400 %
752cda38 4401 (?P<has_key>\((?P<key>{0})\))? # mapping key
4402 (?P<format>
4403 (?:[#0\-+ ]+)? # conversion flags (optional)
4404 (?:\d+)? # minimum field width (optional)
4405 (?:\.\d+)? # precision (optional)
4406 [hlL]? # length modifier (optional)
4407 [diouxXeEfFgGcrs] # conversion type
4408 )
143db31d 4409'''
4410
a020a0dc
PH
4411
4412def limit_length(s, length):
4413 """ Add ellipses to overly long strings """
4414 if s is None:
4415 return None
4416 ELLIPSES = '...'
4417 if len(s) > length:
4418 return s[:length - len(ELLIPSES)] + ELLIPSES
4419 return s
48844745
PH
4420
4421
4422def version_tuple(v):
5f9b8394 4423 return tuple(int(e) for e in re.split(r'[-.]', v))
48844745
PH
4424
4425
4426def is_outdated_version(version, limit, assume_new=True):
4427 if not version:
4428 return not assume_new
4429 try:
4430 return version_tuple(version) < version_tuple(limit)
4431 except ValueError:
4432 return not assume_new
732ea2f0
PH
4433
4434
4435def ytdl_is_updateable():
7a5c1cfe 4436 """ Returns whether yt-dlp can be updated with -U """
735d865e 4437 return False
4438
732ea2f0
PH
4439 from zipimport import zipimporter
4440
4441 return isinstance(globals().get('__loader__'), zipimporter) or hasattr(sys, 'frozen')
7d4111ed
PH
4442
4443
4444def args_to_str(args):
4445 # Get a short string representation for a subprocess command
702ccf2d 4446 return ' '.join(compat_shlex_quote(a) for a in args)
2ccd1b10
PH
4447
4448
9b9c5355 4449def error_to_compat_str(err):
fdae2358
S
4450 err_str = str(err)
4451 # On python 2 error byte string must be decoded with proper
4452 # encoding rather than ascii
4453 if sys.version_info[0] < 3:
4454 err_str = err_str.decode(preferredencoding())
4455 return err_str
4456
4457
c460bdd5 4458def mimetype2ext(mt):
eb9ee194
S
4459 if mt is None:
4460 return None
4461
765ac263
JMF
4462 ext = {
4463 'audio/mp4': 'm4a',
6c33d24b
YCH
4464 # Per RFC 3003, audio/mpeg can be .mp1, .mp2 or .mp3. Here use .mp3 as
4465 # it's the most popular one
4466 'audio/mpeg': 'mp3',
ba39289d 4467 'audio/x-wav': 'wav',
765ac263
JMF
4468 }.get(mt)
4469 if ext is not None:
4470 return ext
4471
c460bdd5 4472 _, _, res = mt.rpartition('/')
6562d34a 4473 res = res.split(';')[0].strip().lower()
c460bdd5
PH
4474
4475 return {
f6861ec9 4476 '3gpp': '3gp',
cafcf657 4477 'smptett+xml': 'tt',
cafcf657 4478 'ttaf+xml': 'dfxp',
a0d8d704 4479 'ttml+xml': 'ttml',
f6861ec9 4480 'x-flv': 'flv',
a0d8d704 4481 'x-mp4-fragmented': 'mp4',
d4f05d47 4482 'x-ms-sami': 'sami',
a0d8d704 4483 'x-ms-wmv': 'wmv',
b4173f15
RA
4484 'mpegurl': 'm3u8',
4485 'x-mpegurl': 'm3u8',
4486 'vnd.apple.mpegurl': 'm3u8',
4487 'dash+xml': 'mpd',
b4173f15 4488 'f4m+xml': 'f4m',
f164b971 4489 'hds+xml': 'f4m',
e910fe2f 4490 'vnd.ms-sstr+xml': 'ism',
c2b2c7e1 4491 'quicktime': 'mov',
98ce1a3f 4492 'mp2t': 'ts',
39e7107d 4493 'x-wav': 'wav',
c460bdd5
PH
4494 }.get(res, res)
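Sketch of mimetype2ext, including its fallback to the bare subtype (sample types are arbitrary):

# Illustrative sketch only; not part of the original module
from yt_dlp.utils import mimetype2ext

assert mimetype2ext('audio/mp4') == 'm4a'
assert mimetype2ext('application/x-mpegURL; charset=UTF-8') == 'm3u8'
assert mimetype2ext('video/unknown-subtype') == 'unknown-subtype'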
4495
4496
4f3c5e06 4497def parse_codecs(codecs_str):
4498 # http://tools.ietf.org/html/rfc6381
4499 if not codecs_str:
4500 return {}
a0566bbf 4501 split_codecs = list(filter(None, map(
4f3c5e06 4502 lambda str: str.strip(), codecs_str.strip().strip(',').split(','))))
4503 vcodec, acodec = None, None
a0566bbf 4504 for full_codec in split_codecs:
4f3c5e06 4505 codec = full_codec.split('.')[0]
28cc2241 4506 if codec in ('avc1', 'avc2', 'avc3', 'avc4', 'vp9', 'vp8', 'hev1', 'hev2', 'h263', 'h264', 'mp4v', 'hvc1', 'av01', 'theora'):
4f3c5e06 4507 if not vcodec:
4508 vcodec = full_codec
60f5c9fb 4509 elif codec in ('mp4a', 'opus', 'vorbis', 'mp3', 'aac', 'ac-3', 'ec-3', 'eac3', 'dtsc', 'dtse', 'dtsh', 'dtsl'):
4f3c5e06 4510 if not acodec:
4511 acodec = full_codec
4512 else:
60f5c9fb 4513 write_string('WARNING: Unknown codec %s\n' % full_codec, sys.stderr)
4f3c5e06 4514 if not vcodec and not acodec:
a0566bbf 4515 if len(split_codecs) == 2:
4f3c5e06 4516 return {
a0566bbf 4517 'vcodec': split_codecs[0],
4518 'acodec': split_codecs[1],
4f3c5e06 4519 }
4520 else:
4521 return {
4522 'vcodec': vcodec or 'none',
4523 'acodec': acodec or 'none',
4524 }
4525 return {}
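Sketch of parse_codecs splitting an RFC 6381 codecs string (sample strings are arbitrary):

# Illustrative sketch only; not part of the original module
from yt_dlp.utils import parse_codecs

assert parse_codecs('avc1.64001f, mp4a.40.2') == {'vcodec': 'avc1.64001f', 'acodec': 'mp4a.40.2'}
assert parse_codecs('vp9') == {'vcodec': 'vp9', 'acodec': 'none'}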
4526
4527
2ccd1b10 4528def urlhandle_detect_ext(url_handle):
79298173 4529 getheader = url_handle.headers.get
2ccd1b10 4530
b55ee18f
PH
4531 cd = getheader('Content-Disposition')
4532 if cd:
4533 m = re.match(r'attachment;\s*filename="(?P<filename>[^"]+)"', cd)
4534 if m:
4535 e = determine_ext(m.group('filename'), default_ext=None)
4536 if e:
4537 return e
4538
c460bdd5 4539 return mimetype2ext(getheader('Content-Type'))
05900629
PH
4540
4541
1e399778
YCH
4542def encode_data_uri(data, mime_type):
4543 return 'data:%s;base64,%s' % (mime_type, base64.b64encode(data).decode('ascii'))
4544
4545
05900629 4546def age_restricted(content_limit, age_limit):
6ec6cb4e 4547 """ Returns True iff the content should be blocked """
05900629
PH
4548
4549 if age_limit is None: # No limit set
4550 return False
4551 if content_limit is None:
4552 return False # Content available for everyone
4553 return age_limit < content_limit
61ca9a80
PH
4554
4555
4556def is_html(first_bytes):
4557 """ Detect whether a file contains HTML by examining its first bytes. """
4558
4559 BOMS = [
4560 (b'\xef\xbb\xbf', 'utf-8'),
4561 (b'\x00\x00\xfe\xff', 'utf-32-be'),
4562 (b'\xff\xfe\x00\x00', 'utf-32-le'),
4563 (b'\xff\xfe', 'utf-16-le'),
4564 (b'\xfe\xff', 'utf-16-be'),
4565 ]
4566 for bom, enc in BOMS:
4567 if first_bytes.startswith(bom):
4568 s = first_bytes[len(bom):].decode(enc, 'replace')
4569 break
4570 else:
4571 s = first_bytes.decode('utf-8', 'replace')
4572
4573 return re.match(r'^\s*<', s)
a055469f
PH
4574
4575
4576def determine_protocol(info_dict):
4577 protocol = info_dict.get('protocol')
4578 if protocol is not None:
4579 return protocol
4580
4581 url = info_dict['url']
4582 if url.startswith('rtmp'):
4583 return 'rtmp'
4584 elif url.startswith('mms'):
4585 return 'mms'
4586 elif url.startswith('rtsp'):
4587 return 'rtsp'
4588
4589 ext = determine_ext(url)
4590 if ext == 'm3u8':
4591 return 'm3u8'
4592 elif ext == 'f4m':
4593 return 'f4m'
4594
4595 return compat_urllib_parse_urlparse(url).scheme
cfb56d1a
PH
4596
4597
76d321f6 4598def render_table(header_row, data, delim=False, extraGap=0, hideEmpty=False):
cfb56d1a 4599 """ Render a list of rows, each as a list of values """
76d321f6 4600
4601 def get_max_lens(table):
4602 return [max(len(compat_str(v)) for v in col) for col in zip(*table)]
4603
4604 def filter_using_list(row, filterArray):
4605 return [col for (take, col) in zip(filterArray, row) if take]
4606
4607 if hideEmpty:
4608 max_lens = get_max_lens(data)
4609 header_row = filter_using_list(header_row, max_lens)
4610 data = [filter_using_list(row, max_lens) for row in data]
4611
cfb56d1a 4612 table = [header_row] + data
76d321f6 4613 max_lens = get_max_lens(table)
4614 if delim:
4615 table = [header_row] + [['-' * ml for ml in max_lens]] + data
4616 format_str = ' '.join('%-' + compat_str(ml + extraGap) + 's' for ml in max_lens[:-1]) + ' %s'
cfb56d1a 4617 return '\n'.join(format_str % tuple(row) for row in table)
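Sketch of render_table output with a delimiter row (the sample rows are arbitrary; the expected output shown in the comments assumes the padding computed above):

# Illustrative sketch only; not part of the original module
from yt_dlp.utils import render_table

print(render_table(['format', 'ext'], [['248', 'webm'], ['137', 'mp4']], delim=True))
# format ext
# ------ ----
# 248    webm
# 137    mp4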
347de493
PH
4618
4619
4620def _match_one(filter_part, dct):
4621 COMPARISON_OPERATORS = {
4622 '<': operator.lt,
4623 '<=': operator.le,
4624 '>': operator.gt,
4625 '>=': operator.ge,
4626 '=': operator.eq,
4627 '!=': operator.ne,
4628 }
4629 operator_rex = re.compile(r'''(?x)\s*
4630 (?P<key>[a-z_]+)
4631 \s*(?P<op>%s)(?P<none_inclusive>\s*\?)?\s*
4632 (?:
4633 (?P<intval>[0-9.]+(?:[kKmMgGtTpPeEzZyY]i?[Bb]?)?)|
db13c16e 4634 (?P<quote>["\'])(?P<quotedstrval>(?:\\.|(?!(?P=quote)|\\).)+?)(?P=quote)|
347de493
PH
4635 (?P<strval>(?![0-9.])[a-z0-9A-Z]*)
4636 )
4637 \s*$
4638 ''' % '|'.join(map(re.escape, COMPARISON_OPERATORS.keys())))
4639 m = operator_rex.search(filter_part)
4640 if m:
4641 op = COMPARISON_OPERATORS[m.group('op')]
e5a088dc 4642 actual_value = dct.get(m.group('key'))
3089bc74
S
4643 if (m.group('quotedstrval') is not None
4644 or m.group('strval') is not None
e5a088dc
S
 4645 # If the original field is a string and the matching comparison value is
4646 # a number we should respect the origin of the original field
4647 # and process comparison value as a string (see
067aa17e 4648 # https://github.com/ytdl-org/youtube-dl/issues/11082).
3089bc74
S
4649 or actual_value is not None and m.group('intval') is not None
4650 and isinstance(actual_value, compat_str)):
347de493
PH
4651 if m.group('op') not in ('=', '!='):
4652 raise ValueError(
4653 'Operator %s does not support string values!' % m.group('op'))
db13c16e
S
4654 comparison_value = m.group('quotedstrval') or m.group('strval') or m.group('intval')
4655 quote = m.group('quote')
4656 if quote is not None:
4657 comparison_value = comparison_value.replace(r'\%s' % quote, quote)
347de493
PH
4658 else:
4659 try:
4660 comparison_value = int(m.group('intval'))
4661 except ValueError:
4662 comparison_value = parse_filesize(m.group('intval'))
4663 if comparison_value is None:
4664 comparison_value = parse_filesize(m.group('intval') + 'B')
4665 if comparison_value is None:
4666 raise ValueError(
4667 'Invalid integer value %r in filter part %r' % (
4668 m.group('intval'), filter_part))
347de493
PH
4669 if actual_value is None:
4670 return m.group('none_inclusive')
4671 return op(actual_value, comparison_value)
4672
4673 UNARY_OPERATORS = {
1cc47c66
S
4674 '': lambda v: (v is True) if isinstance(v, bool) else (v is not None),
4675 '!': lambda v: (v is False) if isinstance(v, bool) else (v is None),
347de493
PH
4676 }
4677 operator_rex = re.compile(r'''(?x)\s*
4678 (?P<op>%s)\s*(?P<key>[a-z_]+)
4679 \s*$
4680 ''' % '|'.join(map(re.escape, UNARY_OPERATORS.keys())))
4681 m = operator_rex.search(filter_part)
4682 if m:
4683 op = UNARY_OPERATORS[m.group('op')]
4684 actual_value = dct.get(m.group('key'))
4685 return op(actual_value)
4686
4687 raise ValueError('Invalid filter part %r' % filter_part)
4688
4689
4690def match_str(filter_str, dct):
4691 """ Filter a dictionary with a simple string syntax. Returns True (=passes filter) or false """
4692
4693 return all(
4694 _match_one(filter_part, dct) for filter_part in filter_str.split('&'))
4695
4696
4697def match_filter_func(filter_str):
4698 def _match_func(info_dict):
4699 if match_str(filter_str, info_dict):
4700 return None
4701 else:
4702 video_title = info_dict.get('title', info_dict.get('id', 'video'))
4703 return '%s does not pass filter %s, skipping ..' % (video_title, filter_str)
4704 return _match_func
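Sketch of the filter mini-language consumed by _match_one/match_str (field names and values are arbitrary):

# Illustrative sketch only; not part of the original module
from yt_dlp.utils import match_str

video = {'duration': 620, 'like_count': 1500, 'description': None}
assert match_str('duration > 600 & like_count >= 1000', video)
assert not match_str('description', video)    # unary test: the field must be present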
91410c9b
PH
4705
4706
bf6427d2
YCH
4707def parse_dfxp_time_expr(time_expr):
4708 if not time_expr:
d631d5f9 4709 return
bf6427d2
YCH
4710
4711 mobj = re.match(r'^(?P<time_offset>\d+(?:\.\d+)?)s?$', time_expr)
4712 if mobj:
4713 return float(mobj.group('time_offset'))
4714
db2fe38b 4715 mobj = re.match(r'^(\d+):(\d\d):(\d\d(?:(?:\.|:)\d+)?)$', time_expr)
bf6427d2 4716 if mobj:
db2fe38b 4717 return 3600 * int(mobj.group(1)) + 60 * int(mobj.group(2)) + float(mobj.group(3).replace(':', '.'))
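Sketch of the two time-expression forms parse_dfxp_time_expr handles (sample values are arbitrary):

# Illustrative sketch only; not part of the original module
from yt_dlp.utils import parse_dfxp_time_expr

assert parse_dfxp_time_expr('1.5s') == 1.5
assert parse_dfxp_time_expr('00:01:30.5') == 90.5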
bf6427d2
YCH
4718
4719
c1c924ab
YCH
4720def srt_subtitles_timecode(seconds):
4721 return '%02d:%02d:%02d,%03d' % (seconds / 3600, (seconds % 3600) / 60, seconds % 60, (seconds % 1) * 1000)
bf6427d2
YCH
4722
4723
4724def dfxp2srt(dfxp_data):
3869028f
YCH
4725 '''
4726 @param dfxp_data A bytes-like object containing DFXP data
4727 @returns A unicode object containing converted SRT data
4728 '''
5b995f71 4729 LEGACY_NAMESPACES = (
3869028f
YCH
4730 (b'http://www.w3.org/ns/ttml', [
4731 b'http://www.w3.org/2004/11/ttaf1',
4732 b'http://www.w3.org/2006/04/ttaf1',
4733 b'http://www.w3.org/2006/10/ttaf1',
5b995f71 4734 ]),
3869028f
YCH
4735 (b'http://www.w3.org/ns/ttml#styling', [
4736 b'http://www.w3.org/ns/ttml#style',
5b995f71
RA
4737 ]),
4738 )
4739
4740 SUPPORTED_STYLING = [
4741 'color',
4742 'fontFamily',
4743 'fontSize',
4744 'fontStyle',
4745 'fontWeight',
4746 'textDecoration'
4747 ]
4748
4e335771 4749 _x = functools.partial(xpath_with_ns, ns_map={
261f4730 4750 'xml': 'http://www.w3.org/XML/1998/namespace',
4e335771 4751 'ttml': 'http://www.w3.org/ns/ttml',
5b995f71 4752 'tts': 'http://www.w3.org/ns/ttml#styling',
4e335771 4753 })
bf6427d2 4754
5b995f71
RA
4755 styles = {}
4756 default_style = {}
4757
87de7069 4758 class TTMLPElementParser(object):
5b995f71
RA
4759 _out = ''
4760 _unclosed_elements = []
4761 _applied_styles = []
bf6427d2 4762
2b14cb56 4763 def start(self, tag, attrib):
5b995f71
RA
4764 if tag in (_x('ttml:br'), 'br'):
4765 self._out += '\n'
4766 else:
4767 unclosed_elements = []
4768 style = {}
4769 element_style_id = attrib.get('style')
4770 if default_style:
4771 style.update(default_style)
4772 if element_style_id:
4773 style.update(styles.get(element_style_id, {}))
4774 for prop in SUPPORTED_STYLING:
4775 prop_val = attrib.get(_x('tts:' + prop))
4776 if prop_val:
4777 style[prop] = prop_val
4778 if style:
4779 font = ''
4780 for k, v in sorted(style.items()):
4781 if self._applied_styles and self._applied_styles[-1].get(k) == v:
4782 continue
4783 if k == 'color':
4784 font += ' color="%s"' % v
4785 elif k == 'fontSize':
4786 font += ' size="%s"' % v
4787 elif k == 'fontFamily':
4788 font += ' face="%s"' % v
4789 elif k == 'fontWeight' and v == 'bold':
4790 self._out += '<b>'
4791 unclosed_elements.append('b')
4792 elif k == 'fontStyle' and v == 'italic':
4793 self._out += '<i>'
4794 unclosed_elements.append('i')
4795 elif k == 'textDecoration' and v == 'underline':
4796 self._out += '<u>'
4797 unclosed_elements.append('u')
4798 if font:
4799 self._out += '<font' + font + '>'
4800 unclosed_elements.append('font')
4801 applied_style = {}
4802 if self._applied_styles:
4803 applied_style.update(self._applied_styles[-1])
4804 applied_style.update(style)
4805 self._applied_styles.append(applied_style)
4806 self._unclosed_elements.append(unclosed_elements)
bf6427d2 4807
2b14cb56 4808 def end(self, tag):
5b995f71
RA
4809 if tag not in (_x('ttml:br'), 'br'):
4810 unclosed_elements = self._unclosed_elements.pop()
4811 for element in reversed(unclosed_elements):
4812 self._out += '</%s>' % element
4813 if unclosed_elements and self._applied_styles:
4814 self._applied_styles.pop()
bf6427d2 4815
2b14cb56 4816 def data(self, data):
5b995f71 4817 self._out += data
2b14cb56 4818
4819 def close(self):
5b995f71 4820 return self._out.strip()
2b14cb56 4821
4822 def parse_node(node):
4823 target = TTMLPElementParser()
4824 parser = xml.etree.ElementTree.XMLParser(target=target)
4825 parser.feed(xml.etree.ElementTree.tostring(node))
4826 return parser.close()
bf6427d2 4827
5b995f71
RA
4828 for k, v in LEGACY_NAMESPACES:
4829 for ns in v:
4830 dfxp_data = dfxp_data.replace(ns, k)
4831
3869028f 4832 dfxp = compat_etree_fromstring(dfxp_data)
bf6427d2 4833 out = []
5b995f71 4834 paras = dfxp.findall(_x('.//ttml:p')) or dfxp.findall('.//p')
1b0427e6
YCH
4835
4836 if not paras:
4837 raise ValueError('Invalid dfxp/TTML subtitle')
bf6427d2 4838
5b995f71
RA
4839 repeat = False
4840 while True:
4841 for style in dfxp.findall(_x('.//ttml:style')):
261f4730
RA
4842 style_id = style.get('id') or style.get(_x('xml:id'))
4843 if not style_id:
4844 continue
5b995f71
RA
4845 parent_style_id = style.get('style')
4846 if parent_style_id:
4847 if parent_style_id not in styles:
4848 repeat = True
4849 continue
4850 styles[style_id] = styles[parent_style_id].copy()
4851 for prop in SUPPORTED_STYLING:
4852 prop_val = style.get(_x('tts:' + prop))
4853 if prop_val:
4854 styles.setdefault(style_id, {})[prop] = prop_val
4855 if repeat:
4856 repeat = False
4857 else:
4858 break
4859
4860 for p in ('body', 'div'):
4861 ele = xpath_element(dfxp, [_x('.//ttml:' + p), './/' + p])
4862 if ele is None:
4863 continue
4864 style = styles.get(ele.get('style'))
4865 if not style:
4866 continue
4867 default_style.update(style)
4868
bf6427d2 4869 for para, index in zip(paras, itertools.count(1)):
d631d5f9 4870 begin_time = parse_dfxp_time_expr(para.attrib.get('begin'))
7dff0363 4871 end_time = parse_dfxp_time_expr(para.attrib.get('end'))
d631d5f9
YCH
4872 dur = parse_dfxp_time_expr(para.attrib.get('dur'))
4873 if begin_time is None:
4874 continue
7dff0363 4875 if not end_time:
d631d5f9
YCH
4876 if not dur:
4877 continue
4878 end_time = begin_time + dur
bf6427d2
YCH
4879 out.append('%d\n%s --> %s\n%s\n\n' % (
4880 index,
c1c924ab
YCH
4881 srt_subtitles_timecode(begin_time),
4882 srt_subtitles_timecode(end_time),
bf6427d2
YCH
4883 parse_node(para)))
4884
4885 return ''.join(out)
4886
4887
66e289ba
S
4888def cli_option(params, command_option, param):
4889 param = params.get(param)
98e698f1
RA
4890 if param:
4891 param = compat_str(param)
66e289ba
S
4892 return [command_option, param] if param is not None else []
4893
4894
4895def cli_bool_option(params, command_option, param, true_value='true', false_value='false', separator=None):
4896 param = params.get(param)
5b232f46
S
4897 if param is None:
4898 return []
66e289ba
S
4899 assert isinstance(param, bool)
4900 if separator:
4901 return [command_option + separator + (true_value if param else false_value)]
4902 return [command_option, true_value if param else false_value]
4903
4904
4905def cli_valueless_option(params, command_option, param, expected_value=True):
4906 param = params.get(param)
4907 return [command_option] if param == expected_value else []
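Sketch of how the cli_* helpers translate yt-dlp params into external-command arguments (the option names are arbitrary examples):

# Illustrative sketch only; not part of the original module
from yt_dlp.utils import cli_option, cli_bool_option

params = {'proxy': 'socks5://127.0.0.1:1080', 'nocheckcertificate': True}
assert cli_option(params, '--proxy', 'proxy') == ['--proxy', 'socks5://127.0.0.1:1080']
assert cli_bool_option(params, '--no-check-certificate', 'nocheckcertificate') == ['--no-check-certificate', 'true']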
4908
4909
e92caff5 4910def cli_configuration_args(argdict, keys, default=[], use_compat=True):
eab9b2bc 4911 if isinstance(argdict, (list, tuple)): # for backward compatibility
e92caff5 4912 if use_compat:
5b1ecbb3 4913 return argdict
4914 else:
4915 argdict = None
eab9b2bc 4916 if argdict is None:
5b1ecbb3 4917 return default
eab9b2bc 4918 assert isinstance(argdict, dict)
4919
e92caff5 4920 assert isinstance(keys, (list, tuple))
4921 for key_list in keys:
4922 if isinstance(key_list, compat_str):
4923 key_list = (key_list,)
4924 arg_list = list(filter(
4925 lambda x: x is not None,
4926 [argdict.get(key.lower()) for key in key_list]))
4927 if arg_list:
4928 return [arg for args in arg_list for arg in args]
4929 return default
66e289ba
S
4930
4931
39672624
YCH
4932class ISO639Utils(object):
4933 # See http://www.loc.gov/standards/iso639-2/ISO-639-2_utf-8.txt
4934 _lang_map = {
4935 'aa': 'aar',
4936 'ab': 'abk',
4937 'ae': 'ave',
4938 'af': 'afr',
4939 'ak': 'aka',
4940 'am': 'amh',
4941 'an': 'arg',
4942 'ar': 'ara',
4943 'as': 'asm',
4944 'av': 'ava',
4945 'ay': 'aym',
4946 'az': 'aze',
4947 'ba': 'bak',
4948 'be': 'bel',
4949 'bg': 'bul',
4950 'bh': 'bih',
4951 'bi': 'bis',
4952 'bm': 'bam',
4953 'bn': 'ben',
4954 'bo': 'bod',
4955 'br': 'bre',
4956 'bs': 'bos',
4957 'ca': 'cat',
4958 'ce': 'che',
4959 'ch': 'cha',
4960 'co': 'cos',
4961 'cr': 'cre',
4962 'cs': 'ces',
4963 'cu': 'chu',
4964 'cv': 'chv',
4965 'cy': 'cym',
4966 'da': 'dan',
4967 'de': 'deu',
4968 'dv': 'div',
4969 'dz': 'dzo',
4970 'ee': 'ewe',
4971 'el': 'ell',
4972 'en': 'eng',
4973 'eo': 'epo',
4974 'es': 'spa',
4975 'et': 'est',
4976 'eu': 'eus',
4977 'fa': 'fas',
4978 'ff': 'ful',
4979 'fi': 'fin',
4980 'fj': 'fij',
4981 'fo': 'fao',
4982 'fr': 'fra',
4983 'fy': 'fry',
4984 'ga': 'gle',
4985 'gd': 'gla',
4986 'gl': 'glg',
4987 'gn': 'grn',
4988 'gu': 'guj',
4989 'gv': 'glv',
4990 'ha': 'hau',
4991 'he': 'heb',
b7acc835 4992 'iw': 'heb', # Replaced by he in 1989 revision
39672624
YCH
4993 'hi': 'hin',
4994 'ho': 'hmo',
4995 'hr': 'hrv',
4996 'ht': 'hat',
4997 'hu': 'hun',
4998 'hy': 'hye',
4999 'hz': 'her',
5000 'ia': 'ina',
5001 'id': 'ind',
b7acc835 5002 'in': 'ind', # Replaced by id in 1989 revision
39672624
YCH
5003 'ie': 'ile',
5004 'ig': 'ibo',
5005 'ii': 'iii',
5006 'ik': 'ipk',
5007 'io': 'ido',
5008 'is': 'isl',
5009 'it': 'ita',
5010 'iu': 'iku',
5011 'ja': 'jpn',
5012 'jv': 'jav',
5013 'ka': 'kat',
5014 'kg': 'kon',
5015 'ki': 'kik',
5016 'kj': 'kua',
5017 'kk': 'kaz',
5018 'kl': 'kal',
5019 'km': 'khm',
5020 'kn': 'kan',
5021 'ko': 'kor',
5022 'kr': 'kau',
5023 'ks': 'kas',
5024 'ku': 'kur',
5025 'kv': 'kom',
5026 'kw': 'cor',
5027 'ky': 'kir',
5028 'la': 'lat',
5029 'lb': 'ltz',
5030 'lg': 'lug',
5031 'li': 'lim',
5032 'ln': 'lin',
5033 'lo': 'lao',
5034 'lt': 'lit',
5035 'lu': 'lub',
5036 'lv': 'lav',
5037 'mg': 'mlg',
5038 'mh': 'mah',
5039 'mi': 'mri',
5040 'mk': 'mkd',
5041 'ml': 'mal',
5042 'mn': 'mon',
5043 'mr': 'mar',
5044 'ms': 'msa',
5045 'mt': 'mlt',
5046 'my': 'mya',
5047 'na': 'nau',
5048 'nb': 'nob',
5049 'nd': 'nde',
5050 'ne': 'nep',
5051 'ng': 'ndo',
5052 'nl': 'nld',
5053 'nn': 'nno',
5054 'no': 'nor',
5055 'nr': 'nbl',
5056 'nv': 'nav',
5057 'ny': 'nya',
5058 'oc': 'oci',
5059 'oj': 'oji',
5060 'om': 'orm',
5061 'or': 'ori',
5062 'os': 'oss',
5063 'pa': 'pan',
5064 'pi': 'pli',
5065 'pl': 'pol',
5066 'ps': 'pus',
5067 'pt': 'por',
5068 'qu': 'que',
5069 'rm': 'roh',
5070 'rn': 'run',
5071 'ro': 'ron',
5072 'ru': 'rus',
5073 'rw': 'kin',
5074 'sa': 'san',
5075 'sc': 'srd',
5076 'sd': 'snd',
5077 'se': 'sme',
5078 'sg': 'sag',
5079 'si': 'sin',
5080 'sk': 'slk',
5081 'sl': 'slv',
5082 'sm': 'smo',
5083 'sn': 'sna',
5084 'so': 'som',
5085 'sq': 'sqi',
5086 'sr': 'srp',
5087 'ss': 'ssw',
5088 'st': 'sot',
5089 'su': 'sun',
5090 'sv': 'swe',
5091 'sw': 'swa',
5092 'ta': 'tam',
5093 'te': 'tel',
5094 'tg': 'tgk',
5095 'th': 'tha',
5096 'ti': 'tir',
5097 'tk': 'tuk',
5098 'tl': 'tgl',
5099 'tn': 'tsn',
5100 'to': 'ton',
5101 'tr': 'tur',
5102 'ts': 'tso',
5103 'tt': 'tat',
5104 'tw': 'twi',
5105 'ty': 'tah',
5106 'ug': 'uig',
5107 'uk': 'ukr',
5108 'ur': 'urd',
5109 'uz': 'uzb',
5110 've': 'ven',
5111 'vi': 'vie',
5112 'vo': 'vol',
5113 'wa': 'wln',
5114 'wo': 'wol',
5115 'xh': 'xho',
5116 'yi': 'yid',
e9a50fba 5117 'ji': 'yid', # Replaced by yi in 1989 revision
39672624
YCH
5118 'yo': 'yor',
5119 'za': 'zha',
5120 'zh': 'zho',
5121 'zu': 'zul',
5122 }
5123
5124 @classmethod
5125 def short2long(cls, code):
5126 """Convert language code from ISO 639-1 to ISO 639-2/T"""
5127 return cls._lang_map.get(code[:2])
5128
5129 @classmethod
5130 def long2short(cls, code):
5131 """Convert language code from ISO 639-2/T to ISO 639-1"""
5132 for short_name, long_name in cls._lang_map.items():
5133 if long_name == code:
5134 return short_name
5135
5136
4eb10f66
YCH
5137class ISO3166Utils(object):
5138 # From http://data.okfn.org/data/core/country-list
5139 _country_map = {
5140 'AF': 'Afghanistan',
5141 'AX': 'Åland Islands',
5142 'AL': 'Albania',
5143 'DZ': 'Algeria',
5144 'AS': 'American Samoa',
5145 'AD': 'Andorra',
5146 'AO': 'Angola',
5147 'AI': 'Anguilla',
5148 'AQ': 'Antarctica',
5149 'AG': 'Antigua and Barbuda',
5150 'AR': 'Argentina',
5151 'AM': 'Armenia',
5152 'AW': 'Aruba',
5153 'AU': 'Australia',
5154 'AT': 'Austria',
5155 'AZ': 'Azerbaijan',
5156 'BS': 'Bahamas',
5157 'BH': 'Bahrain',
5158 'BD': 'Bangladesh',
5159 'BB': 'Barbados',
5160 'BY': 'Belarus',
5161 'BE': 'Belgium',
5162 'BZ': 'Belize',
5163 'BJ': 'Benin',
5164 'BM': 'Bermuda',
5165 'BT': 'Bhutan',
5166 'BO': 'Bolivia, Plurinational State of',
5167 'BQ': 'Bonaire, Sint Eustatius and Saba',
5168 'BA': 'Bosnia and Herzegovina',
5169 'BW': 'Botswana',
5170 'BV': 'Bouvet Island',
5171 'BR': 'Brazil',
5172 'IO': 'British Indian Ocean Territory',
5173 'BN': 'Brunei Darussalam',
5174 'BG': 'Bulgaria',
5175 'BF': 'Burkina Faso',
5176 'BI': 'Burundi',
5177 'KH': 'Cambodia',
5178 'CM': 'Cameroon',
5179 'CA': 'Canada',
5180 'CV': 'Cape Verde',
5181 'KY': 'Cayman Islands',
5182 'CF': 'Central African Republic',
5183 'TD': 'Chad',
5184 'CL': 'Chile',
5185 'CN': 'China',
5186 'CX': 'Christmas Island',
5187 'CC': 'Cocos (Keeling) Islands',
5188 'CO': 'Colombia',
5189 'KM': 'Comoros',
5190 'CG': 'Congo',
5191 'CD': 'Congo, the Democratic Republic of the',
5192 'CK': 'Cook Islands',
5193 'CR': 'Costa Rica',
5194 'CI': 'Côte d\'Ivoire',
5195 'HR': 'Croatia',
5196 'CU': 'Cuba',
5197 'CW': 'Curaçao',
5198 'CY': 'Cyprus',
5199 'CZ': 'Czech Republic',
5200 'DK': 'Denmark',
5201 'DJ': 'Djibouti',
5202 'DM': 'Dominica',
5203 'DO': 'Dominican Republic',
5204 'EC': 'Ecuador',
5205 'EG': 'Egypt',
5206 'SV': 'El Salvador',
5207 'GQ': 'Equatorial Guinea',
5208 'ER': 'Eritrea',
5209 'EE': 'Estonia',
5210 'ET': 'Ethiopia',
5211 'FK': 'Falkland Islands (Malvinas)',
5212 'FO': 'Faroe Islands',
5213 'FJ': 'Fiji',
5214 'FI': 'Finland',
5215 'FR': 'France',
5216 'GF': 'French Guiana',
5217 'PF': 'French Polynesia',
5218 'TF': 'French Southern Territories',
5219 'GA': 'Gabon',
5220 'GM': 'Gambia',
5221 'GE': 'Georgia',
5222 'DE': 'Germany',
5223 'GH': 'Ghana',
5224 'GI': 'Gibraltar',
5225 'GR': 'Greece',
5226 'GL': 'Greenland',
5227 'GD': 'Grenada',
5228 'GP': 'Guadeloupe',
5229 'GU': 'Guam',
5230 'GT': 'Guatemala',
5231 'GG': 'Guernsey',
5232 'GN': 'Guinea',
5233 'GW': 'Guinea-Bissau',
5234 'GY': 'Guyana',
5235 'HT': 'Haiti',
5236 'HM': 'Heard Island and McDonald Islands',
5237 'VA': 'Holy See (Vatican City State)',
5238 'HN': 'Honduras',
5239 'HK': 'Hong Kong',
5240 'HU': 'Hungary',
5241 'IS': 'Iceland',
5242 'IN': 'India',
5243 'ID': 'Indonesia',
5244 'IR': 'Iran, Islamic Republic of',
5245 'IQ': 'Iraq',
5246 'IE': 'Ireland',
5247 'IM': 'Isle of Man',
5248 'IL': 'Israel',
5249 'IT': 'Italy',
5250 'JM': 'Jamaica',
5251 'JP': 'Japan',
5252 'JE': 'Jersey',
5253 'JO': 'Jordan',
5254 'KZ': 'Kazakhstan',
5255 'KE': 'Kenya',
5256 'KI': 'Kiribati',
5257 'KP': 'Korea, Democratic People\'s Republic of',
5258 'KR': 'Korea, Republic of',
5259 'KW': 'Kuwait',
5260 'KG': 'Kyrgyzstan',
5261 'LA': 'Lao People\'s Democratic Republic',
5262 'LV': 'Latvia',
5263 'LB': 'Lebanon',
5264 'LS': 'Lesotho',
5265 'LR': 'Liberia',
5266 'LY': 'Libya',
5267 'LI': 'Liechtenstein',
5268 'LT': 'Lithuania',
5269 'LU': 'Luxembourg',
5270 'MO': 'Macao',
5271 'MK': 'Macedonia, the Former Yugoslav Republic of',
5272 'MG': 'Madagascar',
5273 'MW': 'Malawi',
5274 'MY': 'Malaysia',
5275 'MV': 'Maldives',
5276 'ML': 'Mali',
5277 'MT': 'Malta',
5278 'MH': 'Marshall Islands',
5279 'MQ': 'Martinique',
5280 'MR': 'Mauritania',
5281 'MU': 'Mauritius',
5282 'YT': 'Mayotte',
5283 'MX': 'Mexico',
5284 'FM': 'Micronesia, Federated States of',
5285 'MD': 'Moldova, Republic of',
5286 'MC': 'Monaco',
5287 'MN': 'Mongolia',
5288 'ME': 'Montenegro',
5289 'MS': 'Montserrat',
5290 'MA': 'Morocco',
5291 'MZ': 'Mozambique',
5292 'MM': 'Myanmar',
5293 'NA': 'Namibia',
5294 'NR': 'Nauru',
5295 'NP': 'Nepal',
5296 'NL': 'Netherlands',
5297 'NC': 'New Caledonia',
5298 'NZ': 'New Zealand',
5299 'NI': 'Nicaragua',
5300 'NE': 'Niger',
5301 'NG': 'Nigeria',
5302 'NU': 'Niue',
5303 'NF': 'Norfolk Island',
5304 'MP': 'Northern Mariana Islands',
5305 'NO': 'Norway',
5306 'OM': 'Oman',
5307 'PK': 'Pakistan',
5308 'PW': 'Palau',
5309 'PS': 'Palestine, State of',
5310 'PA': 'Panama',
5311 'PG': 'Papua New Guinea',
5312 'PY': 'Paraguay',
5313 'PE': 'Peru',
5314 'PH': 'Philippines',
5315 'PN': 'Pitcairn',
5316 'PL': 'Poland',
5317 'PT': 'Portugal',
5318 'PR': 'Puerto Rico',
5319 'QA': 'Qatar',
5320 'RE': 'Réunion',
5321 'RO': 'Romania',
5322 'RU': 'Russian Federation',
5323 'RW': 'Rwanda',
5324 'BL': 'Saint Barthélemy',
5325 'SH': 'Saint Helena, Ascension and Tristan da Cunha',
5326 'KN': 'Saint Kitts and Nevis',
5327 'LC': 'Saint Lucia',
5328 'MF': 'Saint Martin (French part)',
5329 'PM': 'Saint Pierre and Miquelon',
5330 'VC': 'Saint Vincent and the Grenadines',
5331 'WS': 'Samoa',
5332 'SM': 'San Marino',
5333 'ST': 'Sao Tome and Principe',
5334 'SA': 'Saudi Arabia',
5335 'SN': 'Senegal',
5336 'RS': 'Serbia',
5337 'SC': 'Seychelles',
5338 'SL': 'Sierra Leone',
5339 'SG': 'Singapore',
5340 'SX': 'Sint Maarten (Dutch part)',
5341 'SK': 'Slovakia',
5342 'SI': 'Slovenia',
5343 'SB': 'Solomon Islands',
5344 'SO': 'Somalia',
5345 'ZA': 'South Africa',
5346 'GS': 'South Georgia and the South Sandwich Islands',
5347 'SS': 'South Sudan',
5348 'ES': 'Spain',
5349 'LK': 'Sri Lanka',
5350 'SD': 'Sudan',
5351 'SR': 'Suriname',
5352 'SJ': 'Svalbard and Jan Mayen',
5353 'SZ': 'Swaziland',
5354 'SE': 'Sweden',
5355 'CH': 'Switzerland',
5356 'SY': 'Syrian Arab Republic',
5357 'TW': 'Taiwan, Province of China',
5358 'TJ': 'Tajikistan',
5359 'TZ': 'Tanzania, United Republic of',
5360 'TH': 'Thailand',
5361 'TL': 'Timor-Leste',
5362 'TG': 'Togo',
5363 'TK': 'Tokelau',
5364 'TO': 'Tonga',
5365 'TT': 'Trinidad and Tobago',
5366 'TN': 'Tunisia',
5367 'TR': 'Turkey',
5368 'TM': 'Turkmenistan',
5369 'TC': 'Turks and Caicos Islands',
5370 'TV': 'Tuvalu',
5371 'UG': 'Uganda',
5372 'UA': 'Ukraine',
5373 'AE': 'United Arab Emirates',
5374 'GB': 'United Kingdom',
5375 'US': 'United States',
5376 'UM': 'United States Minor Outlying Islands',
5377 'UY': 'Uruguay',
5378 'UZ': 'Uzbekistan',
5379 'VU': 'Vanuatu',
5380 'VE': 'Venezuela, Bolivarian Republic of',
5381 'VN': 'Viet Nam',
5382 'VG': 'Virgin Islands, British',
5383 'VI': 'Virgin Islands, U.S.',
5384 'WF': 'Wallis and Futuna',
5385 'EH': 'Western Sahara',
5386 'YE': 'Yemen',
5387 'ZM': 'Zambia',
5388 'ZW': 'Zimbabwe',
5389 }
5390
5391 @classmethod
5392 def short2full(cls, code):
5393 """Convert an ISO 3166-2 country code to the corresponding full name"""
5394 return cls._country_map.get(code.upper())
5395
5396
773f291d
S
5397class GeoUtils(object):
5398 # Major IPv4 address blocks per country
5399 _country_ip_map = {
53896ca5 5400 'AD': '46.172.224.0/19',
773f291d
S
5401 'AE': '94.200.0.0/13',
5402 'AF': '149.54.0.0/17',
5403 'AG': '209.59.64.0/18',
5404 'AI': '204.14.248.0/21',
5405 'AL': '46.99.0.0/16',
5406 'AM': '46.70.0.0/15',
5407 'AO': '105.168.0.0/13',
53896ca5
S
5408 'AP': '182.50.184.0/21',
5409 'AQ': '23.154.160.0/24',
773f291d
S
5410 'AR': '181.0.0.0/12',
5411 'AS': '202.70.112.0/20',
53896ca5 5412 'AT': '77.116.0.0/14',
773f291d
S
5413 'AU': '1.128.0.0/11',
5414 'AW': '181.41.0.0/18',
53896ca5
S
5415 'AX': '185.217.4.0/22',
5416 'AZ': '5.197.0.0/16',
773f291d
S
5417 'BA': '31.176.128.0/17',
5418 'BB': '65.48.128.0/17',
5419 'BD': '114.130.0.0/16',
5420 'BE': '57.0.0.0/8',
53896ca5 5421 'BF': '102.178.0.0/15',
773f291d
S
5422 'BG': '95.42.0.0/15',
5423 'BH': '37.131.0.0/17',
5424 'BI': '154.117.192.0/18',
5425 'BJ': '137.255.0.0/16',
53896ca5 5426 'BL': '185.212.72.0/23',
773f291d
S
5427 'BM': '196.12.64.0/18',
5428 'BN': '156.31.0.0/16',
5429 'BO': '161.56.0.0/16',
5430 'BQ': '161.0.80.0/20',
53896ca5 5431 'BR': '191.128.0.0/12',
773f291d
S
5432 'BS': '24.51.64.0/18',
5433 'BT': '119.2.96.0/19',
5434 'BW': '168.167.0.0/16',
5435 'BY': '178.120.0.0/13',
5436 'BZ': '179.42.192.0/18',
5437 'CA': '99.224.0.0/11',
5438 'CD': '41.243.0.0/16',
53896ca5
S
5439 'CF': '197.242.176.0/21',
5440 'CG': '160.113.0.0/16',
773f291d 5441 'CH': '85.0.0.0/13',
53896ca5 5442 'CI': '102.136.0.0/14',
773f291d
S
5443 'CK': '202.65.32.0/19',
5444 'CL': '152.172.0.0/14',
53896ca5 5445 'CM': '102.244.0.0/14',
773f291d
S
5446 'CN': '36.128.0.0/10',
5447 'CO': '181.240.0.0/12',
5448 'CR': '201.192.0.0/12',
5449 'CU': '152.206.0.0/15',
5450 'CV': '165.90.96.0/19',
5451 'CW': '190.88.128.0/17',
53896ca5 5452 'CY': '31.153.0.0/16',
773f291d
S
5453 'CZ': '88.100.0.0/14',
5454 'DE': '53.0.0.0/8',
5455 'DJ': '197.241.0.0/17',
5456 'DK': '87.48.0.0/12',
5457 'DM': '192.243.48.0/20',
5458 'DO': '152.166.0.0/15',
5459 'DZ': '41.96.0.0/12',
5460 'EC': '186.68.0.0/15',
5461 'EE': '90.190.0.0/15',
5462 'EG': '156.160.0.0/11',
5463 'ER': '196.200.96.0/20',
5464 'ES': '88.0.0.0/11',
5465 'ET': '196.188.0.0/14',
5466 'EU': '2.16.0.0/13',
5467 'FI': '91.152.0.0/13',
5468 'FJ': '144.120.0.0/16',
53896ca5 5469 'FK': '80.73.208.0/21',
773f291d
S
5470 'FM': '119.252.112.0/20',
5471 'FO': '88.85.32.0/19',
5472 'FR': '90.0.0.0/9',
5473 'GA': '41.158.0.0/15',
5474 'GB': '25.0.0.0/8',
5475 'GD': '74.122.88.0/21',
5476 'GE': '31.146.0.0/16',
5477 'GF': '161.22.64.0/18',
5478 'GG': '62.68.160.0/19',
53896ca5
S
5479 'GH': '154.160.0.0/12',
5480 'GI': '95.164.0.0/16',
773f291d
S
5481 'GL': '88.83.0.0/19',
5482 'GM': '160.182.0.0/15',
5483 'GN': '197.149.192.0/18',
5484 'GP': '104.250.0.0/19',
5485 'GQ': '105.235.224.0/20',
5486 'GR': '94.64.0.0/13',
5487 'GT': '168.234.0.0/16',
5488 'GU': '168.123.0.0/16',
5489 'GW': '197.214.80.0/20',
5490 'GY': '181.41.64.0/18',
5491 'HK': '113.252.0.0/14',
5492 'HN': '181.210.0.0/16',
5493 'HR': '93.136.0.0/13',
5494 'HT': '148.102.128.0/17',
5495 'HU': '84.0.0.0/14',
5496 'ID': '39.192.0.0/10',
5497 'IE': '87.32.0.0/12',
5498 'IL': '79.176.0.0/13',
5499 'IM': '5.62.80.0/20',
5500 'IN': '117.192.0.0/10',
5501 'IO': '203.83.48.0/21',
5502 'IQ': '37.236.0.0/14',
5503 'IR': '2.176.0.0/12',
5504 'IS': '82.221.0.0/16',
5505 'IT': '79.0.0.0/10',
5506 'JE': '87.244.64.0/18',
5507 'JM': '72.27.0.0/17',
5508 'JO': '176.29.0.0/16',
53896ca5 5509 'JP': '133.0.0.0/8',
773f291d
S
5510 'KE': '105.48.0.0/12',
5511 'KG': '158.181.128.0/17',
5512 'KH': '36.37.128.0/17',
5513 'KI': '103.25.140.0/22',
5514 'KM': '197.255.224.0/20',
53896ca5 5515 'KN': '198.167.192.0/19',
773f291d
S
5516 'KP': '175.45.176.0/22',
5517 'KR': '175.192.0.0/10',
5518 'KW': '37.36.0.0/14',
5519 'KY': '64.96.0.0/15',
5520 'KZ': '2.72.0.0/13',
5521 'LA': '115.84.64.0/18',
5522 'LB': '178.135.0.0/16',
53896ca5 5523 'LC': '24.92.144.0/20',
773f291d
S
5524 'LI': '82.117.0.0/19',
5525 'LK': '112.134.0.0/15',
53896ca5 5526 'LR': '102.183.0.0/16',
773f291d
S
5527 'LS': '129.232.0.0/17',
5528 'LT': '78.56.0.0/13',
5529 'LU': '188.42.0.0/16',
5530 'LV': '46.109.0.0/16',
5531 'LY': '41.252.0.0/14',
5532 'MA': '105.128.0.0/11',
5533 'MC': '88.209.64.0/18',
5534 'MD': '37.246.0.0/16',
5535 'ME': '178.175.0.0/17',
5536 'MF': '74.112.232.0/21',
5537 'MG': '154.126.0.0/17',
5538 'MH': '117.103.88.0/21',
5539 'MK': '77.28.0.0/15',
5540 'ML': '154.118.128.0/18',
5541 'MM': '37.111.0.0/17',
5542 'MN': '49.0.128.0/17',
5543 'MO': '60.246.0.0/16',
5544 'MP': '202.88.64.0/20',
5545 'MQ': '109.203.224.0/19',
5546 'MR': '41.188.64.0/18',
5547 'MS': '208.90.112.0/22',
5548 'MT': '46.11.0.0/16',
5549 'MU': '105.16.0.0/12',
5550 'MV': '27.114.128.0/18',
53896ca5 5551 'MW': '102.70.0.0/15',
773f291d
S
5552 'MX': '187.192.0.0/11',
5553 'MY': '175.136.0.0/13',
5554 'MZ': '197.218.0.0/15',
5555 'NA': '41.182.0.0/16',
5556 'NC': '101.101.0.0/18',
5557 'NE': '197.214.0.0/18',
5558 'NF': '203.17.240.0/22',
5559 'NG': '105.112.0.0/12',
5560 'NI': '186.76.0.0/15',
5561 'NL': '145.96.0.0/11',
5562 'NO': '84.208.0.0/13',
5563 'NP': '36.252.0.0/15',
5564 'NR': '203.98.224.0/19',
5565 'NU': '49.156.48.0/22',
5566 'NZ': '49.224.0.0/14',
5567 'OM': '5.36.0.0/15',
5568 'PA': '186.72.0.0/15',
5569 'PE': '186.160.0.0/14',
5570 'PF': '123.50.64.0/18',
5571 'PG': '124.240.192.0/19',
5572 'PH': '49.144.0.0/13',
5573 'PK': '39.32.0.0/11',
5574 'PL': '83.0.0.0/11',
5575 'PM': '70.36.0.0/20',
5576 'PR': '66.50.0.0/16',
5577 'PS': '188.161.0.0/16',
5578 'PT': '85.240.0.0/13',
5579 'PW': '202.124.224.0/20',
5580 'PY': '181.120.0.0/14',
5581 'QA': '37.210.0.0/15',
53896ca5 5582 'RE': '102.35.0.0/16',
773f291d 5583 'RO': '79.112.0.0/13',
53896ca5 5584 'RS': '93.86.0.0/15',
773f291d 5585 'RU': '5.136.0.0/13',
53896ca5 5586 'RW': '41.186.0.0/16',
773f291d
S
5587 'SA': '188.48.0.0/13',
5588 'SB': '202.1.160.0/19',
5589 'SC': '154.192.0.0/11',
53896ca5 5590 'SD': '102.120.0.0/13',
773f291d 5591 'SE': '78.64.0.0/12',
53896ca5 5592 'SG': '8.128.0.0/10',
773f291d
S
5593 'SI': '188.196.0.0/14',
5594 'SK': '78.98.0.0/15',
53896ca5 5595 'SL': '102.143.0.0/17',
773f291d
S
5596 'SM': '89.186.32.0/19',
5597 'SN': '41.82.0.0/15',
53896ca5 5598 'SO': '154.115.192.0/18',
773f291d
S
5599 'SR': '186.179.128.0/17',
5600 'SS': '105.235.208.0/21',
5601 'ST': '197.159.160.0/19',
5602 'SV': '168.243.0.0/16',
5603 'SX': '190.102.0.0/20',
5604 'SY': '5.0.0.0/16',
5605 'SZ': '41.84.224.0/19',
5606 'TC': '65.255.48.0/20',
5607 'TD': '154.68.128.0/19',
5608 'TG': '196.168.0.0/14',
5609 'TH': '171.96.0.0/13',
5610 'TJ': '85.9.128.0/18',
5611 'TK': '27.96.24.0/21',
5612 'TL': '180.189.160.0/20',
5613 'TM': '95.85.96.0/19',
5614 'TN': '197.0.0.0/11',
5615 'TO': '175.176.144.0/21',
5616 'TR': '78.160.0.0/11',
5617 'TT': '186.44.0.0/15',
5618 'TV': '202.2.96.0/19',
5619 'TW': '120.96.0.0/11',
5620 'TZ': '156.156.0.0/14',
53896ca5
S
5621 'UA': '37.52.0.0/14',
5622 'UG': '102.80.0.0/13',
5623 'US': '6.0.0.0/8',
773f291d 5624 'UY': '167.56.0.0/13',
53896ca5 5625 'UZ': '84.54.64.0/18',
773f291d 5626 'VA': '212.77.0.0/19',
53896ca5 5627 'VC': '207.191.240.0/21',
773f291d 5628 'VE': '186.88.0.0/13',
53896ca5 5629 'VG': '66.81.192.0/20',
773f291d
S
5630 'VI': '146.226.0.0/16',
5631 'VN': '14.160.0.0/11',
5632 'VU': '202.80.32.0/20',
5633 'WF': '117.20.32.0/21',
5634 'WS': '202.4.32.0/19',
5635 'YE': '134.35.0.0/16',
5636 'YT': '41.242.116.0/22',
5637 'ZA': '41.0.0.0/11',
53896ca5
S
5638 'ZM': '102.144.0.0/13',
5639 'ZW': '102.177.192.0/18',
773f291d
S
5640 }
5641
5642 @classmethod
5f95927a
S
5643 def random_ipv4(cls, code_or_block):
5644 if len(code_or_block) == 2:
5645 block = cls._country_ip_map.get(code_or_block.upper())
5646 if not block:
5647 return None
5648 else:
5649 block = code_or_block
773f291d
S
5650 addr, preflen = block.split('/')
5651 addr_min = compat_struct_unpack('!L', socket.inet_aton(addr))[0]
5652 addr_max = addr_min | (0xffffffff >> int(preflen))
18a0defa 5653 return compat_str(socket.inet_ntoa(
4248dad9 5654 compat_struct_pack('!L', random.randint(addr_min, addr_max))))
773f291d
S
5655
5656
91410c9b 5657class PerRequestProxyHandler(compat_urllib_request.ProxyHandler):
2461f79d
PH
5658 def __init__(self, proxies=None):
5659 # Set default handlers
5660 for type in ('http', 'https'):
5661 setattr(self, '%s_open' % type,
5662 lambda r, proxy='__noproxy__', type=type, meth=self.proxy_open:
5663 meth(r, proxy, type))
38e87f6c 5664 compat_urllib_request.ProxyHandler.__init__(self, proxies)
2461f79d 5665
91410c9b 5666 def proxy_open(self, req, proxy, type):
2461f79d 5667 req_proxy = req.headers.get('Ytdl-request-proxy')
91410c9b
PH
5668 if req_proxy is not None:
5669 proxy = req_proxy
2461f79d
PH
5670 del req.headers['Ytdl-request-proxy']
5671
5672 if proxy == '__noproxy__':
5673 return None # No Proxy
51fb4995 5674 if compat_urlparse.urlparse(proxy).scheme.lower() in ('socks', 'socks4', 'socks4a', 'socks5'):
71aff188 5675 req.add_header('Ytdl-socks-proxy', proxy)
7a5c1cfe 5676 # yt-dlp's http/https handlers do wrapping the socket with socks
71aff188 5677 return None
91410c9b
PH
5678 return compat_urllib_request.ProxyHandler.proxy_open(
5679 self, req, proxy, type)
5bc880b9
YCH
5680
5681
0a5445dd
YCH
5682# Both long_to_bytes and bytes_to_long are adapted from PyCrypto, which is
5683# released into Public Domain
5684# https://github.com/dlitz/pycrypto/blob/master/lib/Crypto/Util/number.py#L387
5685
5686def long_to_bytes(n, blocksize=0):
5687 """long_to_bytes(n:long, blocksize:int) : string
5688 Convert a long integer to a byte string.
5689
5690 If optional blocksize is given and greater than zero, pad the front of the
5691 byte string with binary zeros so that the length is a multiple of
5692 blocksize.
5693 """
5694 # after much testing, this algorithm was deemed to be the fastest
5695 s = b''
5696 n = int(n)
5697 while n > 0:
5698 s = compat_struct_pack('>I', n & 0xffffffff) + s
5699 n = n >> 32
5700 # strip off leading zeros
5701 for i in range(len(s)):
5702 if s[i] != b'\000'[0]:
5703 break
5704 else:
5705 # only happens when n == 0
5706 s = b'\000'
5707 i = 0
5708 s = s[i:]
5709 # add back some pad bytes. this could be done more efficiently w.r.t. the
5710 # de-padding being done above, but sigh...
5711 if blocksize > 0 and len(s) % blocksize:
5712 s = (blocksize - len(s) % blocksize) * b'\000' + s
5713 return s
5714
5715
5716def bytes_to_long(s):
5717 """bytes_to_long(string) : long
5718 Convert a byte string to a long integer.
5719
5720 This is (essentially) the inverse of long_to_bytes().
5721 """
5722 acc = 0
5723 length = len(s)
5724 if length % 4:
5725 extra = (4 - length % 4)
5726 s = b'\000' * extra + s
5727 length = length + extra
5728 for i in range(0, length, 4):
5729 acc = (acc << 32) + compat_struct_unpack('>I', s[i:i + 4])[0]
5730 return acc
5731
5732
5bc880b9
YCH
5733def ohdave_rsa_encrypt(data, exponent, modulus):
5734 '''
5735 Implement OHDave's RSA algorithm. See http://www.ohdave.com/rsa/
5736
5737 Input:
5738 data: data to encrypt, bytes-like object
5739 exponent, modulus: parameter e and N of RSA algorithm, both integer
5740 Output: hex string of encrypted data
5741
5742 Limitation: supports one block encryption only
5743 '''
5744
5745 payload = int(binascii.hexlify(data[::-1]), 16)
5746 encrypted = pow(payload, exponent, modulus)
5747 return '%x' % encrypted
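# Illustrative call with toy parameters (not from the original module; real sites
# supply their own exponent e and modulus N, which are much larger):
#   >>> ohdave_rsa_encrypt(b'\x02', 3, 15)  # payload is 2; 2**3 mod 15 == 8
#   '8'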
81bdc8fd
YCH
5748
5749
f48409c7
YCH
5750def pkcs1pad(data, length):
5751 """
5752 Padding input data with PKCS#1 scheme
5753
5754 @param {int[]} data input data
5755 @param {int} length target length
5756 @returns {int[]} padded data
5757 """
5758 if len(data) > length - 11:
5759 raise ValueError('Input data too long for PKCS#1 padding')
5760
5761    pseudo_random = [random.randint(1, 255) for _ in range(length - len(data) - 3)]  # padding bytes must be non-zero (PKCS#1 v1.5)
5762 return [0, 2] + pseudo_random + [0] + data
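# Illustrative padding layout (a sketch, not from the original module): the result is
# 0x00 0x02, pseudo-random filler, a 0x00 separator, then the data, at the target length:
#   >>> padded = pkcs1pad([1, 2, 3], 16)
#   >>> padded[:2], padded[-4:], len(padded)
#   ([0, 2], [0, 1, 2, 3], 16)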
5763
5764
5eb6bdce 5765def encode_base_n(num, n, table=None):
59f898b7 5766 FULL_TABLE = '0123456789abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ'
59f898b7
YCH
5767 if not table:
5768 table = FULL_TABLE[:n]
5769
5eb6bdce
YCH
5770 if n > len(table):
5771 raise ValueError('base %d exceeds table length %d' % (n, len(table)))
5772
5773 if num == 0:
5774 return table[0]
5775
81bdc8fd
YCH
5776 ret = ''
5777 while num:
5778 ret = table[num % n] + ret
5779 num = num // n
5780 return ret
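# Illustrative conversions (not from the original module); the default table covers
# digits plus lower- and upper-case letters, i.e. bases up to 62:
#   >>> encode_base_n(255, 16)
#   'ff'
#   >>> encode_base_n(0, 2)
#   '0'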
f52354a8
YCH
5781
5782
5783def decode_packed_codes(code):
06b3fe29 5784 mobj = re.search(PACKED_CODES_RE, code)
a0566bbf 5785 obfuscated_code, base, count, symbols = mobj.groups()
f52354a8
YCH
5786 base = int(base)
5787 count = int(count)
5788 symbols = symbols.split('|')
5789 symbol_table = {}
5790
5791 while count:
5792 count -= 1
5eb6bdce 5793 base_n_count = encode_base_n(count, base)
f52354a8
YCH
5794 symbol_table[base_n_count] = symbols[count] or base_n_count
5795
5796 return re.sub(
5797 r'\b(\w+)\b', lambda mobj: symbol_table[mobj.group(0)],
a0566bbf 5798 obfuscated_code)
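# Illustrative decoding (a sketch, not from the original module), assuming
# PACKED_CODES_RE (defined earlier in this file) matches the standard
# "eval(function(p,a,c,k,e,d){...})" P.A.C.K.E.R. signature:
#   >>> decode_packed_codes("eval(function(p,a,c,k,e,d){}('0 1',62,2,'hello|world'.split('|'),0,{}))")
#   'hello world'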
e154c651 5799
5800
1ced2221
S
5801def caesar(s, alphabet, shift):
5802 if shift == 0:
5803 return s
5804 l = len(alphabet)
5805 return ''.join(
5806 alphabet[(alphabet.index(c) + shift) % l] if c in alphabet else c
5807 for c in s)
5808
5809
5810def rot47(s):
5811 return caesar(s, r'''!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~''', 47)
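# Illustrative examples (not from the original module): caesar() shifts only characters
# present in the given alphabet, and rot47 applied twice is the identity, since its
# alphabet has 94 characters:
#   >>> caesar('abc xyz', 'abcdefghijklmnopqrstuvwxyz', 1)
#   'bcd yza'
#   >>> rot47(rot47('yt-dlp'))
#   'yt-dlp'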
5812
5813
e154c651 5814def parse_m3u8_attributes(attrib):
5815 info = {}
5816 for (key, val) in re.findall(r'(?P<key>[A-Z0-9-]+)=(?P<val>"[^"]+"|[^",]+)(?:,|$)', attrib):
5817 if val.startswith('"'):
5818 val = val[1:-1]
5819 info[key] = val
5820 return info
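# Illustrative parse of an EXT-X-STREAM-INF attribute list (not from the original
# module; key order follows the input on Python 3.7+):
#   >>> parse_m3u8_attributes('BANDWIDTH=1280000,CODECS="mp4a.40.2,avc1.4d401f"')
#   {'BANDWIDTH': '1280000', 'CODECS': 'mp4a.40.2,avc1.4d401f'}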
1143535d
YCH
5821
5822
5823def urshift(val, n):
5824 return val >> n if val >= 0 else (val + 0x100000000) >> n
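# Illustrative 32-bit logical (unsigned) right shift, matching JavaScript's >>> operator
# (not from the original module):
#   >>> urshift(-1, 28)
#   15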
d3f8e038
YCH
5825
5826
5827# Based on png2str() written by @gdkchan and improved by @yokrysty
067aa17e 5828# Originally posted at https://github.com/ytdl-org/youtube-dl/issues/9706
d3f8e038
YCH
5829def decode_png(png_data):
5830 # Reference: https://www.w3.org/TR/PNG/
5831 header = png_data[8:]
5832
5833 if png_data[:8] != b'\x89PNG\x0d\x0a\x1a\x0a' or header[4:8] != b'IHDR':
5834 raise IOError('Not a valid PNG file.')
5835
5836 int_map = {1: '>B', 2: '>H', 4: '>I'}
5837 unpack_integer = lambda x: compat_struct_unpack(int_map[len(x)], x)[0]
5838
5839 chunks = []
5840
5841 while header:
5842 length = unpack_integer(header[:4])
5843 header = header[4:]
5844
5845 chunk_type = header[:4]
5846 header = header[4:]
5847
5848 chunk_data = header[:length]
5849 header = header[length:]
5850
5851 header = header[4:] # Skip CRC
5852
5853 chunks.append({
5854 'type': chunk_type,
5855 'length': length,
5856 'data': chunk_data
5857 })
5858
5859 ihdr = chunks[0]['data']
5860
5861 width = unpack_integer(ihdr[:4])
5862 height = unpack_integer(ihdr[4:8])
5863
5864 idat = b''
5865
5866 for chunk in chunks:
5867 if chunk['type'] == b'IDAT':
5868 idat += chunk['data']
5869
5870 if not idat:
5871 raise IOError('Unable to read PNG data.')
5872
5873 decompressed_data = bytearray(zlib.decompress(idat))
5874
5875 stride = width * 3
5876 pixels = []
5877
5878 def _get_pixel(idx):
5879 x = idx % stride
5880 y = idx // stride
5881 return pixels[y][x]
5882
5883 for y in range(height):
5884 basePos = y * (1 + stride)
5885 filter_type = decompressed_data[basePos]
5886
5887 current_row = []
5888
5889 pixels.append(current_row)
5890
5891 for x in range(stride):
5892 color = decompressed_data[1 + basePos + x]
5893 basex = y * stride + x
5894 left = 0
5895 up = 0
5896
5897 if x > 2:
5898 left = _get_pixel(basex - 3)
5899 if y > 0:
5900 up = _get_pixel(basex - stride)
5901
5902 if filter_type == 1: # Sub
5903 color = (color + left) & 0xff
5904 elif filter_type == 2: # Up
5905 color = (color + up) & 0xff
5906 elif filter_type == 3: # Average
5907 color = (color + ((left + up) >> 1)) & 0xff
5908 elif filter_type == 4: # Paeth
5909 a = left
5910 b = up
5911 c = 0
5912
5913 if x > 2 and y > 0:
5914 c = _get_pixel(basex - stride - 3)
5915
5916 p = a + b - c
5917
5918 pa = abs(p - a)
5919 pb = abs(p - b)
5920 pc = abs(p - c)
5921
5922 if pa <= pb and pa <= pc:
5923 color = (color + a) & 0xff
5924 elif pb <= pc:
5925 color = (color + b) & 0xff
5926 else:
5927 color = (color + c) & 0xff
5928
5929 current_row.append(color)
5930
5931 return width, height, pixels
efa97bdc
YCH
5932
5933
5934def write_xattr(path, key, value):
5935 # This mess below finds the best xattr tool for the job
5936 try:
5937 # try the pyxattr module...
5938 import xattr
5939
53a7e3d2
YCH
5940 if hasattr(xattr, 'set'): # pyxattr
5941 # Unicode arguments are not supported in python-pyxattr until
5942 # version 0.5.0
067aa17e 5943 # See https://github.com/ytdl-org/youtube-dl/issues/5498
53a7e3d2
YCH
5944 pyxattr_required_version = '0.5.0'
5945 if version_tuple(xattr.__version__) < version_tuple(pyxattr_required_version):
5946 # TODO: fallback to CLI tools
5947 raise XAttrUnavailableError(
5948 'python-pyxattr is detected but is too old. '
7a5c1cfe 5949 'yt-dlp requires %s or above while your version is %s. '
53a7e3d2
YCH
5950 'Falling back to other xattr implementations' % (
5951 pyxattr_required_version, xattr.__version__))
5952
5953 setxattr = xattr.set
5954 else: # xattr
5955 setxattr = xattr.setxattr
efa97bdc
YCH
5956
5957 try:
53a7e3d2 5958 setxattr(path, key, value)
efa97bdc
YCH
5959 except EnvironmentError as e:
5960 raise XAttrMetadataError(e.errno, e.strerror)
5961
5962 except ImportError:
5963 if compat_os_name == 'nt':
5964 # Write xattrs to NTFS Alternate Data Streams:
5965 # http://en.wikipedia.org/wiki/NTFS#Alternate_data_streams_.28ADS.29
5966 assert ':' not in key
5967 assert os.path.exists(path)
5968
5969 ads_fn = path + ':' + key
5970 try:
5971 with open(ads_fn, 'wb') as f:
5972 f.write(value)
5973 except EnvironmentError as e:
5974 raise XAttrMetadataError(e.errno, e.strerror)
5975 else:
5976 user_has_setfattr = check_executable('setfattr', ['--version'])
5977 user_has_xattr = check_executable('xattr', ['-h'])
5978
5979 if user_has_setfattr or user_has_xattr:
5980
5981 value = value.decode('utf-8')
5982 if user_has_setfattr:
5983 executable = 'setfattr'
5984 opts = ['-n', key, '-v', value]
5985 elif user_has_xattr:
5986 executable = 'xattr'
5987 opts = ['-w', key, value]
5988
3089bc74
S
5989 cmd = ([encodeFilename(executable, True)]
5990 + [encodeArgument(o) for o in opts]
5991 + [encodeFilename(path, True)])
efa97bdc
YCH
5992
5993 try:
5994 p = subprocess.Popen(
5995 cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE, stdin=subprocess.PIPE)
5996 except EnvironmentError as e:
5997 raise XAttrMetadataError(e.errno, e.strerror)
f5b1bca9 5998 stdout, stderr = process_communicate_or_kill(p)
efa97bdc
YCH
5999 stderr = stderr.decode('utf-8', 'replace')
6000 if p.returncode != 0:
6001 raise XAttrMetadataError(p.returncode, stderr)
6002
6003 else:
6004                # On Unix, but we could not find pyxattr, setfattr, or xattr.
6005 if sys.platform.startswith('linux'):
6006 raise XAttrUnavailableError(
6007 "Couldn't find a tool to set the xattrs. "
6008 "Install either the python 'pyxattr' or 'xattr' "
6009 "modules, or the GNU 'attr' package "
6010 "(which contains the 'setfattr' tool).")
6011 else:
6012 raise XAttrUnavailableError(
6013 "Couldn't find a tool to set the xattrs. "
6014 "Install either the python 'xattr' module, "
6015 "or the 'xattr' binary.")
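# Illustrative usage (a minimal sketch, not from the original module; the path and
# attribute name are hypothetical, and the value must be bytes):
#   >>> write_xattr('video.mp4', 'user.xdg.referrer.url', b'https://example.com/watch')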
0c265486
YCH
6016
6017
6018def random_birthday(year_field, month_field, day_field):
aa374bc7
AS
6019 start_date = datetime.date(1950, 1, 1)
6020 end_date = datetime.date(1995, 12, 31)
6021 offset = random.randint(0, (end_date - start_date).days)
6022 random_date = start_date + datetime.timedelta(offset)
0c265486 6023 return {
aa374bc7
AS
6024 year_field: str(random_date.year),
6025 month_field: str(random_date.month),
6026 day_field: str(random_date.day),
0c265486 6027 }
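# Illustrative call (not from the original module; the values shown are one possible
# outcome, drawn at random from dates between 1950-01-01 and 1995-12-31):
#   >>> random_birthday('birth_year', 'birth_month', 'birth_day')
#   {'birth_year': '1987', 'birth_month': '6', 'birth_day': '23'}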
732044af 6028
c76eb41b 6029
732044af 6030# Templates for internet shortcut files, which are plain text files.
6031DOT_URL_LINK_TEMPLATE = '''
6032[InternetShortcut]
6033URL=%(url)s
6034'''.lstrip()
6035
6036DOT_WEBLOC_LINK_TEMPLATE = '''
6037<?xml version="1.0" encoding="UTF-8"?>
6038<!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
6039<plist version="1.0">
6040<dict>
6041\t<key>URL</key>
6042\t<string>%(url)s</string>
6043</dict>
6044</plist>
6045'''.lstrip()
6046
6047DOT_DESKTOP_LINK_TEMPLATE = '''
6048[Desktop Entry]
6049Encoding=UTF-8
6050Name=%(filename)s
6051Type=Link
6052URL=%(url)s
6053Icon=text-html
6054'''.lstrip()
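# Illustrative rendering of one of the templates (not from the original module; the
# URL is hypothetical):
#   >>> DOT_URL_LINK_TEMPLATE % {'url': 'https://example.com/watch?v=abc'}
#   '[InternetShortcut]\nURL=https://example.com/watch?v=abc\n'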
6055
6056
6057def iri_to_uri(iri):
6058 """
6059 Converts an IRI (Internationalized Resource Identifier, allowing Unicode characters) to a URI (Uniform Resource Identifier, ASCII-only).
6060
6061    The function doesn't add an additional layer of escaping; e.g., it doesn't escape `%3C` as `%253C`. Instead, it percent-escapes characters with an underlying UTF-8 encoding *other than* those already escaped, leaving already-escaped sequences intact.
6062 """
6063
6064 iri_parts = compat_urllib_parse_urlparse(iri)
6065
6066 if '[' in iri_parts.netloc:
6067        raise ValueError('IPv6 URIs are not yet supported.')
6068 # Querying `.netloc`, when there's only one bracket, also raises a ValueError.
6069
6070 # The `safe` argument values, that the following code uses, contain the characters that should not be percent-encoded. Everything else but letters, digits and '_.-' will be percent-encoded with an underlying UTF-8 encoding. Everything already percent-encoded will be left as is.
6071
6072 net_location = ''
6073 if iri_parts.username:
6074 net_location += compat_urllib_parse_quote(iri_parts.username, safe=r"!$%&'()*+,~")
6075 if iri_parts.password is not None:
6076 net_location += ':' + compat_urllib_parse_quote(iri_parts.password, safe=r"!$%&'()*+,~")
6077 net_location += '@'
6078
6079 net_location += iri_parts.hostname.encode('idna').decode('utf-8') # Punycode for Unicode hostnames.
6080 # The 'idna' encoding produces ASCII text.
6081 if iri_parts.port is not None and iri_parts.port != 80:
6082 net_location += ':' + str(iri_parts.port)
6083
6084 return compat_urllib_parse_urlunparse(
6085 (iri_parts.scheme,
6086 net_location,
6087
6088 compat_urllib_parse_quote_plus(iri_parts.path, safe=r"!$%&'()*+,/:;=@|~"),
6089
6090 # Unsure about the `safe` argument, since this is a legacy way of handling parameters.
6091 compat_urllib_parse_quote_plus(iri_parts.params, safe=r"!$%&'()*+,/:;=@|~"),
6092
6093 # Not totally sure about the `safe` argument, since the source does not explicitly mention the query URI component.
6094 compat_urllib_parse_quote_plus(iri_parts.query, safe=r"!$%&'()*+,/:;=?@{|}~"),
6095
6096 compat_urllib_parse_quote_plus(iri_parts.fragment, safe=r"!#$%&'()*+,/:;=?@{|}~")))
6097
6098 # Source for `safe` arguments: https://url.spec.whatwg.org/#percent-encoded-bytes.
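# Illustrative conversion (not from the original module; the URL is hypothetical).
# Non-ASCII path characters are percent-encoded as UTF-8, ASCII parts are left alone:
#   >>> iri_to_uri('http://example.com/fü?x=1')
#   'http://example.com/f%C3%BC?x=1'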
6099
6100
6101def to_high_limit_path(path):
6102 if sys.platform in ['win32', 'cygwin']:
6103 # Work around MAX_PATH limitation on Windows. The maximum allowed length for the individual path segments may still be quite limited.
6104 return r'\\?\ '.rstrip() + os.path.abspath(path)
6105
6106 return path
76d321f6 6107
c76eb41b 6108
76d321f6 6109def format_field(obj, field, template='%s', ignore=(None, ''), default='', func=None):
6110 val = obj.get(field, default)
6111 if func and val not in ignore:
6112 val = func(val)
6113 return template % val if val not in ignore else default
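# Illustrative usage (not from the original module): values listed in `ignore` fall
# back to `default` instead of being formatted:
#   >>> format_field({'height': 1080}, 'height', '%sp')
#   '1080p'
#   >>> format_field({'height': None}, 'height', '%sp', default='unknown')
#   'unknown'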
00dd0cd5 6114
6115
6116def clean_podcast_url(url):
6117 return re.sub(r'''(?x)
6118 (?:
6119 (?:
6120 chtbl\.com/track|
6121 media\.blubrry\.com| # https://create.blubrry.com/resources/podcast-media-download-statistics/getting-started/
6122 play\.podtrac\.com
6123 )/[^/]+|
6124 (?:dts|www)\.podtrac\.com/(?:pts/)?redirect\.[0-9a-z]{3,4}| # http://analytics.podtrac.com/how-to-measure
6125 flex\.acast\.com|
6126 pd(?:
6127 cn\.co| # https://podcorn.com/analytics-prefix/
6128 st\.fm # https://podsights.com/docs/
6129 )/e
6130 )/''', '', url)
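# Illustrative cleanup (not from the original module; the media host is hypothetical):
# the tracking prefix is stripped, leaving the direct media URL:
#   >>> clean_podcast_url('https://chtbl.com/track/ABC123/traffic.example.com/episode.mp3')
#   'https://traffic.example.com/episode.mp3'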
ffcb8191
THD
6131
6132
6133_HEX_TABLE = '0123456789abcdef'
6134
6135
6136def random_uuidv4():
6137 return re.sub(r'[xy]', lambda x: _HEX_TABLE[random.randint(0, 15)], 'xxxxxxxx-xxxx-4xxx-yxxx-xxxxxxxxxxxx')
0202b52a 6138
6139
6140def make_dir(path, to_screen=None):
6141 try:
6142 dn = os.path.dirname(path)
6143 if dn and not os.path.exists(dn):
6144 os.makedirs(dn)
6145 return True
6146 except (OSError, IOError) as err:
6147        if callable(to_screen):
6148 to_screen('unable to create directory ' + error_to_compat_str(err))
6149 return False
f74980cb 6150
6151
6152def get_executable_path():
c552ae88 6153 from zipimport import zipimporter
6154 if hasattr(sys, 'frozen'): # Running from PyInstaller
6155 path = os.path.dirname(sys.executable)
6156 elif isinstance(globals().get('__loader__'), zipimporter): # Running from ZIP
6157 path = os.path.join(os.path.dirname(__file__), '../..')
6158 else:
6159 path = os.path.join(os.path.dirname(__file__), '..')
f74980cb 6160 return os.path.abspath(path)
6161
6162
2f567473 6163def load_plugins(name, suffix, namespace):
f74980cb 6164 plugin_info = [None]
6165 classes = []
6166 try:
6167 plugin_info = imp.find_module(
6168 name, [os.path.join(get_executable_path(), 'ytdlp_plugins')])
6169 plugins = imp.load_module(name, *plugin_info)
6170 for name in dir(plugins):
2f567473 6171 if name in namespace:
6172 continue
6173 if not name.endswith(suffix):
f74980cb 6174 continue
6175 klass = getattr(plugins, name)
6176 classes.append(klass)
6177 namespace[name] = klass
6178 except ImportError:
6179 pass
6180 finally:
6181 if plugin_info[0] is not None:
6182 plugin_info[0].close()
6183 return classes
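# Illustrative invocation (a sketch, not from the original module): this would collect
# classes whose names end in 'IE' from ytdlp_plugins/extractors next to the executable,
# injecting them into the given namespace and returning them as a list:
#   >>> plugin_ies = load_plugins('extractors', 'IE', globals())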
06167fbb 6184
6185
324ad820 6186def traverse_obj(obj, keys, *, casesense=True, is_user_input=False, traverse_string=False):
6187 ''' Traverse nested list/dict/tuple
6188 @param casesense Whether to consider dictionary keys as case sensitive
6189 @param is_user_input Whether the keys are generated from user input. If True,
6190 strings are converted to int/slice if necessary
6191 @param traverse_string Whether to traverse inside strings. If True, any
6192 non-compatible object will also be converted into a string
6193 '''
a439a3a4 6194 keys = list(keys)[::-1]
6195 while keys:
6196 key = keys.pop()
324ad820 6197 if isinstance(obj, dict):
6198 assert isinstance(key, compat_str)
a439a3a4 6199 if not casesense:
324ad820 6200 obj = {k.lower(): v for k, v in obj.items()}
a439a3a4 6201 key = key.lower()
324ad820 6202 obj = obj.get(key)
a439a3a4 6203 else:
324ad820 6204 if is_user_input:
6205 key = (int_or_none(key) if ':' not in key
6206 else slice(*map(int_or_none, key.split(':'))))
6207 if not isinstance(obj, (list, tuple)):
6208 if traverse_string:
6209 obj = compat_str(obj)
6210 else:
6211 return None
6212 assert isinstance(key, (int, slice))
6213 obj = try_get(obj, lambda x: x[key])
6214 return obj
6215
6216
6217def traverse_dict(dictn, keys, casesense=True):
6218 ''' For backward compatibility. Do not use '''
6219 return traverse_obj(dictn, keys, casesense=casesense,
6220 is_user_input=True, traverse_string=True)