jfr.im git - yt-dlp.git / blame - yt_dlp/utils.py
Commit: [EmbedSubtitle, SubtitlesConvertor] Fix error when subtitle file is missing
cc52de43 1#!/usr/bin/env python3
dcdb292f 2# coding: utf-8
d77c3dfd 3
ecc0c5ee
PH
4from __future__ import unicode_literals
5
1e399778 6import base64
5bc880b9 7import binascii
912b38b4 8import calendar
676eb3f2 9import codecs
c380cc28 10import collections
62e609ab 11import contextlib
e3946f98 12import ctypes
c496ca96
PH
13import datetime
14import email.utils
0c265486 15import email.header
f45c185f 16import errno
be4a824d 17import functools
d77c3dfd 18import gzip
49fa4d9a
N
19import hashlib
20import hmac
f74980cb 21import imp
03f9daab 22import io
79a2e94e 23import itertools
f4bfd65f 24import json
d77c3dfd 25import locale
02dbf93f 26import math
347de493 27import operator
d77c3dfd 28import os
c496ca96 29import platform
773f291d 30import random
d77c3dfd 31import re
c496ca96 32import socket
79a2e94e 33import ssl
1c088fa8 34import subprocess
d77c3dfd 35import sys
181c8655 36import tempfile
c380cc28 37import time
01951dda 38import traceback
bcf89ce6 39import xml.etree.ElementTree
d77c3dfd 40import zlib
d77c3dfd 41
8c25f81b 42from .compat import (
b4a3d461 43 compat_HTMLParseError,
8bb56eee 44 compat_HTMLParser,
201c1459 45 compat_HTTPError,
8f9312c3 46 compat_basestring,
8c25f81b 47 compat_chr,
1bab3437 48 compat_cookiejar,
d7cd9a9e 49 compat_ctypes_WINFUNCTYPE,
36e6f62c 50 compat_etree_fromstring,
51098426 51 compat_expanduser,
8c25f81b 52 compat_html_entities,
55b2f099 53 compat_html_entities_html5,
be4a824d 54 compat_http_client,
42db58ec 55 compat_integer_types,
e29663c6 56 compat_numeric_types,
c86b6142 57 compat_kwargs,
efa97bdc 58 compat_os_name,
8c25f81b 59 compat_parse_qs,
702ccf2d 60 compat_shlex_quote,
8c25f81b 61 compat_str,
edaa23f8 62 compat_struct_pack,
d3f8e038 63 compat_struct_unpack,
8c25f81b
PH
64 compat_urllib_error,
65 compat_urllib_parse,
15707c7e 66 compat_urllib_parse_urlencode,
8c25f81b 67 compat_urllib_parse_urlparse,
732044af 68 compat_urllib_parse_urlunparse,
69 compat_urllib_parse_quote,
70 compat_urllib_parse_quote_plus,
7581bfc9 71 compat_urllib_parse_unquote_plus,
8c25f81b
PH
72 compat_urllib_request,
73 compat_urlparse,
810c10ba 74 compat_xpath,
8c25f81b 75)
4644ac55 76
71aff188
YCH
77from .socks import (
78 ProxyType,
79 sockssocket,
80)
81
4644ac55 82
51fb4995
YCH
83def register_socks_protocols():
84 # "Register" SOCKS protocols
d5ae6bb5
YCH
85 # In Python < 2.6.5, urlsplit() suffers from bug https://bugs.python.org/issue7904
86 # URLs with protocols not in urlparse.uses_netloc are not handled correctly
51fb4995
YCH
87 for scheme in ('socks', 'socks4', 'socks4a', 'socks5'):
88 if scheme not in compat_urlparse.uses_netloc:
89 compat_urlparse.uses_netloc.append(scheme)
90
91
468e2e92
FV
92# This is not clearly defined otherwise
93compiled_regex_type = type(re.compile(''))
94
f7a147e3
S
95
96def random_user_agent():
97 _USER_AGENT_TPL = 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/%s Safari/537.36'
98 _CHROME_VERSIONS = (
99 '74.0.3729.129',
100 '76.0.3780.3',
101 '76.0.3780.2',
102 '74.0.3729.128',
103 '76.0.3780.1',
104 '76.0.3780.0',
105 '75.0.3770.15',
106 '74.0.3729.127',
107 '74.0.3729.126',
108 '76.0.3779.1',
109 '76.0.3779.0',
110 '75.0.3770.14',
111 '74.0.3729.125',
112 '76.0.3778.1',
113 '76.0.3778.0',
114 '75.0.3770.13',
115 '74.0.3729.124',
116 '74.0.3729.123',
117 '73.0.3683.121',
118 '76.0.3777.1',
119 '76.0.3777.0',
120 '75.0.3770.12',
121 '74.0.3729.122',
122 '76.0.3776.4',
123 '75.0.3770.11',
124 '74.0.3729.121',
125 '76.0.3776.3',
126 '76.0.3776.2',
127 '73.0.3683.120',
128 '74.0.3729.120',
129 '74.0.3729.119',
130 '74.0.3729.118',
131 '76.0.3776.1',
132 '76.0.3776.0',
133 '76.0.3775.5',
134 '75.0.3770.10',
135 '74.0.3729.117',
136 '76.0.3775.4',
137 '76.0.3775.3',
138 '74.0.3729.116',
139 '75.0.3770.9',
140 '76.0.3775.2',
141 '76.0.3775.1',
142 '76.0.3775.0',
143 '75.0.3770.8',
144 '74.0.3729.115',
145 '74.0.3729.114',
146 '76.0.3774.1',
147 '76.0.3774.0',
148 '75.0.3770.7',
149 '74.0.3729.113',
150 '74.0.3729.112',
151 '74.0.3729.111',
152 '76.0.3773.1',
153 '76.0.3773.0',
154 '75.0.3770.6',
155 '74.0.3729.110',
156 '74.0.3729.109',
157 '76.0.3772.1',
158 '76.0.3772.0',
159 '75.0.3770.5',
160 '74.0.3729.108',
161 '74.0.3729.107',
162 '76.0.3771.1',
163 '76.0.3771.0',
164 '75.0.3770.4',
165 '74.0.3729.106',
166 '74.0.3729.105',
167 '75.0.3770.3',
168 '74.0.3729.104',
169 '74.0.3729.103',
170 '74.0.3729.102',
171 '75.0.3770.2',
172 '74.0.3729.101',
173 '75.0.3770.1',
174 '75.0.3770.0',
175 '74.0.3729.100',
176 '75.0.3769.5',
177 '75.0.3769.4',
178 '74.0.3729.99',
179 '75.0.3769.3',
180 '75.0.3769.2',
181 '75.0.3768.6',
182 '74.0.3729.98',
183 '75.0.3769.1',
184 '75.0.3769.0',
185 '74.0.3729.97',
186 '73.0.3683.119',
187 '73.0.3683.118',
188 '74.0.3729.96',
189 '75.0.3768.5',
190 '75.0.3768.4',
191 '75.0.3768.3',
192 '75.0.3768.2',
193 '74.0.3729.95',
194 '74.0.3729.94',
195 '75.0.3768.1',
196 '75.0.3768.0',
197 '74.0.3729.93',
198 '74.0.3729.92',
199 '73.0.3683.117',
200 '74.0.3729.91',
201 '75.0.3766.3',
202 '74.0.3729.90',
203 '75.0.3767.2',
204 '75.0.3767.1',
205 '75.0.3767.0',
206 '74.0.3729.89',
207 '73.0.3683.116',
208 '75.0.3766.2',
209 '74.0.3729.88',
210 '75.0.3766.1',
211 '75.0.3766.0',
212 '74.0.3729.87',
213 '73.0.3683.115',
214 '74.0.3729.86',
215 '75.0.3765.1',
216 '75.0.3765.0',
217 '74.0.3729.85',
218 '73.0.3683.114',
219 '74.0.3729.84',
220 '75.0.3764.1',
221 '75.0.3764.0',
222 '74.0.3729.83',
223 '73.0.3683.113',
224 '75.0.3763.2',
225 '75.0.3761.4',
226 '74.0.3729.82',
227 '75.0.3763.1',
228 '75.0.3763.0',
229 '74.0.3729.81',
230 '73.0.3683.112',
231 '75.0.3762.1',
232 '75.0.3762.0',
233 '74.0.3729.80',
234 '75.0.3761.3',
235 '74.0.3729.79',
236 '73.0.3683.111',
237 '75.0.3761.2',
238 '74.0.3729.78',
239 '74.0.3729.77',
240 '75.0.3761.1',
241 '75.0.3761.0',
242 '73.0.3683.110',
243 '74.0.3729.76',
244 '74.0.3729.75',
245 '75.0.3760.0',
246 '74.0.3729.74',
247 '75.0.3759.8',
248 '75.0.3759.7',
249 '75.0.3759.6',
250 '74.0.3729.73',
251 '75.0.3759.5',
252 '74.0.3729.72',
253 '73.0.3683.109',
254 '75.0.3759.4',
255 '75.0.3759.3',
256 '74.0.3729.71',
257 '75.0.3759.2',
258 '74.0.3729.70',
259 '73.0.3683.108',
260 '74.0.3729.69',
261 '75.0.3759.1',
262 '75.0.3759.0',
263 '74.0.3729.68',
264 '73.0.3683.107',
265 '74.0.3729.67',
266 '75.0.3758.1',
267 '75.0.3758.0',
268 '74.0.3729.66',
269 '73.0.3683.106',
270 '74.0.3729.65',
271 '75.0.3757.1',
272 '75.0.3757.0',
273 '74.0.3729.64',
274 '73.0.3683.105',
275 '74.0.3729.63',
276 '75.0.3756.1',
277 '75.0.3756.0',
278 '74.0.3729.62',
279 '73.0.3683.104',
280 '75.0.3755.3',
281 '75.0.3755.2',
282 '73.0.3683.103',
283 '75.0.3755.1',
284 '75.0.3755.0',
285 '74.0.3729.61',
286 '73.0.3683.102',
287 '74.0.3729.60',
288 '75.0.3754.2',
289 '74.0.3729.59',
290 '75.0.3753.4',
291 '74.0.3729.58',
292 '75.0.3754.1',
293 '75.0.3754.0',
294 '74.0.3729.57',
295 '73.0.3683.101',
296 '75.0.3753.3',
297 '75.0.3752.2',
298 '75.0.3753.2',
299 '74.0.3729.56',
300 '75.0.3753.1',
301 '75.0.3753.0',
302 '74.0.3729.55',
303 '73.0.3683.100',
304 '74.0.3729.54',
305 '75.0.3752.1',
306 '75.0.3752.0',
307 '74.0.3729.53',
308 '73.0.3683.99',
309 '74.0.3729.52',
310 '75.0.3751.1',
311 '75.0.3751.0',
312 '74.0.3729.51',
313 '73.0.3683.98',
314 '74.0.3729.50',
315 '75.0.3750.0',
316 '74.0.3729.49',
317 '74.0.3729.48',
318 '74.0.3729.47',
319 '75.0.3749.3',
320 '74.0.3729.46',
321 '73.0.3683.97',
322 '75.0.3749.2',
323 '74.0.3729.45',
324 '75.0.3749.1',
325 '75.0.3749.0',
326 '74.0.3729.44',
327 '73.0.3683.96',
328 '74.0.3729.43',
329 '74.0.3729.42',
330 '75.0.3748.1',
331 '75.0.3748.0',
332 '74.0.3729.41',
333 '75.0.3747.1',
334 '73.0.3683.95',
335 '75.0.3746.4',
336 '74.0.3729.40',
337 '74.0.3729.39',
338 '75.0.3747.0',
339 '75.0.3746.3',
340 '75.0.3746.2',
341 '74.0.3729.38',
342 '75.0.3746.1',
343 '75.0.3746.0',
344 '74.0.3729.37',
345 '73.0.3683.94',
346 '75.0.3745.5',
347 '75.0.3745.4',
348 '75.0.3745.3',
349 '75.0.3745.2',
350 '74.0.3729.36',
351 '75.0.3745.1',
352 '75.0.3745.0',
353 '75.0.3744.2',
354 '74.0.3729.35',
355 '73.0.3683.93',
356 '74.0.3729.34',
357 '75.0.3744.1',
358 '75.0.3744.0',
359 '74.0.3729.33',
360 '73.0.3683.92',
361 '74.0.3729.32',
362 '74.0.3729.31',
363 '73.0.3683.91',
364 '75.0.3741.2',
365 '75.0.3740.5',
366 '74.0.3729.30',
367 '75.0.3741.1',
368 '75.0.3741.0',
369 '74.0.3729.29',
370 '75.0.3740.4',
371 '73.0.3683.90',
372 '74.0.3729.28',
373 '75.0.3740.3',
374 '73.0.3683.89',
375 '75.0.3740.2',
376 '74.0.3729.27',
377 '75.0.3740.1',
378 '75.0.3740.0',
379 '74.0.3729.26',
380 '73.0.3683.88',
381 '73.0.3683.87',
382 '74.0.3729.25',
383 '75.0.3739.1',
384 '75.0.3739.0',
385 '73.0.3683.86',
386 '74.0.3729.24',
387 '73.0.3683.85',
388 '75.0.3738.4',
389 '75.0.3738.3',
390 '75.0.3738.2',
391 '75.0.3738.1',
392 '75.0.3738.0',
393 '74.0.3729.23',
394 '73.0.3683.84',
395 '74.0.3729.22',
396 '74.0.3729.21',
397 '75.0.3737.1',
398 '75.0.3737.0',
399 '74.0.3729.20',
400 '73.0.3683.83',
401 '74.0.3729.19',
402 '75.0.3736.1',
403 '75.0.3736.0',
404 '74.0.3729.18',
405 '73.0.3683.82',
406 '74.0.3729.17',
407 '75.0.3735.1',
408 '75.0.3735.0',
409 '74.0.3729.16',
410 '73.0.3683.81',
411 '75.0.3734.1',
412 '75.0.3734.0',
413 '74.0.3729.15',
414 '73.0.3683.80',
415 '74.0.3729.14',
416 '75.0.3733.1',
417 '75.0.3733.0',
418 '75.0.3732.1',
419 '74.0.3729.13',
420 '74.0.3729.12',
421 '73.0.3683.79',
422 '74.0.3729.11',
423 '75.0.3732.0',
424 '74.0.3729.10',
425 '73.0.3683.78',
426 '74.0.3729.9',
427 '74.0.3729.8',
428 '74.0.3729.7',
429 '75.0.3731.3',
430 '75.0.3731.2',
431 '75.0.3731.0',
432 '74.0.3729.6',
433 '73.0.3683.77',
434 '73.0.3683.76',
435 '75.0.3730.5',
436 '75.0.3730.4',
437 '73.0.3683.75',
438 '74.0.3729.5',
439 '73.0.3683.74',
440 '75.0.3730.3',
441 '75.0.3730.2',
442 '74.0.3729.4',
443 '73.0.3683.73',
444 '73.0.3683.72',
445 '75.0.3730.1',
446 '75.0.3730.0',
447 '74.0.3729.3',
448 '73.0.3683.71',
449 '74.0.3729.2',
450 '73.0.3683.70',
451 '74.0.3729.1',
452 '74.0.3729.0',
453 '74.0.3726.4',
454 '73.0.3683.69',
455 '74.0.3726.3',
456 '74.0.3728.0',
457 '74.0.3726.2',
458 '73.0.3683.68',
459 '74.0.3726.1',
460 '74.0.3726.0',
461 '74.0.3725.4',
462 '73.0.3683.67',
463 '73.0.3683.66',
464 '74.0.3725.3',
465 '74.0.3725.2',
466 '74.0.3725.1',
467 '74.0.3724.8',
468 '74.0.3725.0',
469 '73.0.3683.65',
470 '74.0.3724.7',
471 '74.0.3724.6',
472 '74.0.3724.5',
473 '74.0.3724.4',
474 '74.0.3724.3',
475 '74.0.3724.2',
476 '74.0.3724.1',
477 '74.0.3724.0',
478 '73.0.3683.64',
479 '74.0.3723.1',
480 '74.0.3723.0',
481 '73.0.3683.63',
482 '74.0.3722.1',
483 '74.0.3722.0',
484 '73.0.3683.62',
485 '74.0.3718.9',
486 '74.0.3702.3',
487 '74.0.3721.3',
488 '74.0.3721.2',
489 '74.0.3721.1',
490 '74.0.3721.0',
491 '74.0.3720.6',
492 '73.0.3683.61',
493 '72.0.3626.122',
494 '73.0.3683.60',
495 '74.0.3720.5',
496 '72.0.3626.121',
497 '74.0.3718.8',
498 '74.0.3720.4',
499 '74.0.3720.3',
500 '74.0.3718.7',
501 '74.0.3720.2',
502 '74.0.3720.1',
503 '74.0.3720.0',
504 '74.0.3718.6',
505 '74.0.3719.5',
506 '73.0.3683.59',
507 '74.0.3718.5',
508 '74.0.3718.4',
509 '74.0.3719.4',
510 '74.0.3719.3',
511 '74.0.3719.2',
512 '74.0.3719.1',
513 '73.0.3683.58',
514 '74.0.3719.0',
515 '73.0.3683.57',
516 '73.0.3683.56',
517 '74.0.3718.3',
518 '73.0.3683.55',
519 '74.0.3718.2',
520 '74.0.3718.1',
521 '74.0.3718.0',
522 '73.0.3683.54',
523 '74.0.3717.2',
524 '73.0.3683.53',
525 '74.0.3717.1',
526 '74.0.3717.0',
527 '73.0.3683.52',
528 '74.0.3716.1',
529 '74.0.3716.0',
530 '73.0.3683.51',
531 '74.0.3715.1',
532 '74.0.3715.0',
533 '73.0.3683.50',
534 '74.0.3711.2',
535 '74.0.3714.2',
536 '74.0.3713.3',
537 '74.0.3714.1',
538 '74.0.3714.0',
539 '73.0.3683.49',
540 '74.0.3713.1',
541 '74.0.3713.0',
542 '72.0.3626.120',
543 '73.0.3683.48',
544 '74.0.3712.2',
545 '74.0.3712.1',
546 '74.0.3712.0',
547 '73.0.3683.47',
548 '72.0.3626.119',
549 '73.0.3683.46',
550 '74.0.3710.2',
551 '72.0.3626.118',
552 '74.0.3711.1',
553 '74.0.3711.0',
554 '73.0.3683.45',
555 '72.0.3626.117',
556 '74.0.3710.1',
557 '74.0.3710.0',
558 '73.0.3683.44',
559 '72.0.3626.116',
560 '74.0.3709.1',
561 '74.0.3709.0',
562 '74.0.3704.9',
563 '73.0.3683.43',
564 '72.0.3626.115',
565 '74.0.3704.8',
566 '74.0.3704.7',
567 '74.0.3708.0',
568 '74.0.3706.7',
569 '74.0.3704.6',
570 '73.0.3683.42',
571 '72.0.3626.114',
572 '74.0.3706.6',
573 '72.0.3626.113',
574 '74.0.3704.5',
575 '74.0.3706.5',
576 '74.0.3706.4',
577 '74.0.3706.3',
578 '74.0.3706.2',
579 '74.0.3706.1',
580 '74.0.3706.0',
581 '73.0.3683.41',
582 '72.0.3626.112',
583 '74.0.3705.1',
584 '74.0.3705.0',
585 '73.0.3683.40',
586 '72.0.3626.111',
587 '73.0.3683.39',
588 '74.0.3704.4',
589 '73.0.3683.38',
590 '74.0.3704.3',
591 '74.0.3704.2',
592 '74.0.3704.1',
593 '74.0.3704.0',
594 '73.0.3683.37',
595 '72.0.3626.110',
596 '72.0.3626.109',
597 '74.0.3703.3',
598 '74.0.3703.2',
599 '73.0.3683.36',
600 '74.0.3703.1',
601 '74.0.3703.0',
602 '73.0.3683.35',
603 '72.0.3626.108',
604 '74.0.3702.2',
605 '74.0.3699.3',
606 '74.0.3702.1',
607 '74.0.3702.0',
608 '73.0.3683.34',
609 '72.0.3626.107',
610 '73.0.3683.33',
611 '74.0.3701.1',
612 '74.0.3701.0',
613 '73.0.3683.32',
614 '73.0.3683.31',
615 '72.0.3626.105',
616 '74.0.3700.1',
617 '74.0.3700.0',
618 '73.0.3683.29',
619 '72.0.3626.103',
620 '74.0.3699.2',
621 '74.0.3699.1',
622 '74.0.3699.0',
623 '73.0.3683.28',
624 '72.0.3626.102',
625 '73.0.3683.27',
626 '73.0.3683.26',
627 '74.0.3698.0',
628 '74.0.3696.2',
629 '72.0.3626.101',
630 '73.0.3683.25',
631 '74.0.3696.1',
632 '74.0.3696.0',
633 '74.0.3694.8',
634 '72.0.3626.100',
635 '74.0.3694.7',
636 '74.0.3694.6',
637 '74.0.3694.5',
638 '74.0.3694.4',
639 '72.0.3626.99',
640 '72.0.3626.98',
641 '74.0.3694.3',
642 '73.0.3683.24',
643 '72.0.3626.97',
644 '72.0.3626.96',
645 '72.0.3626.95',
646 '73.0.3683.23',
647 '72.0.3626.94',
648 '73.0.3683.22',
649 '73.0.3683.21',
650 '72.0.3626.93',
651 '74.0.3694.2',
652 '72.0.3626.92',
653 '74.0.3694.1',
654 '74.0.3694.0',
655 '74.0.3693.6',
656 '73.0.3683.20',
657 '72.0.3626.91',
658 '74.0.3693.5',
659 '74.0.3693.4',
660 '74.0.3693.3',
661 '74.0.3693.2',
662 '73.0.3683.19',
663 '74.0.3693.1',
664 '74.0.3693.0',
665 '73.0.3683.18',
666 '72.0.3626.90',
667 '74.0.3692.1',
668 '74.0.3692.0',
669 '73.0.3683.17',
670 '72.0.3626.89',
671 '74.0.3687.3',
672 '74.0.3691.1',
673 '74.0.3691.0',
674 '73.0.3683.16',
675 '72.0.3626.88',
676 '72.0.3626.87',
677 '73.0.3683.15',
678 '74.0.3690.1',
679 '74.0.3690.0',
680 '73.0.3683.14',
681 '72.0.3626.86',
682 '73.0.3683.13',
683 '73.0.3683.12',
684 '74.0.3689.1',
685 '74.0.3689.0',
686 '73.0.3683.11',
687 '72.0.3626.85',
688 '73.0.3683.10',
689 '72.0.3626.84',
690 '73.0.3683.9',
691 '74.0.3688.1',
692 '74.0.3688.0',
693 '73.0.3683.8',
694 '72.0.3626.83',
695 '74.0.3687.2',
696 '74.0.3687.1',
697 '74.0.3687.0',
698 '73.0.3683.7',
699 '72.0.3626.82',
700 '74.0.3686.4',
701 '72.0.3626.81',
702 '74.0.3686.3',
703 '74.0.3686.2',
704 '74.0.3686.1',
705 '74.0.3686.0',
706 '73.0.3683.6',
707 '72.0.3626.80',
708 '74.0.3685.1',
709 '74.0.3685.0',
710 '73.0.3683.5',
711 '72.0.3626.79',
712 '74.0.3684.1',
713 '74.0.3684.0',
714 '73.0.3683.4',
715 '72.0.3626.78',
716 '72.0.3626.77',
717 '73.0.3683.3',
718 '73.0.3683.2',
719 '72.0.3626.76',
720 '73.0.3683.1',
721 '73.0.3683.0',
722 '72.0.3626.75',
723 '71.0.3578.141',
724 '73.0.3682.1',
725 '73.0.3682.0',
726 '72.0.3626.74',
727 '71.0.3578.140',
728 '73.0.3681.4',
729 '73.0.3681.3',
730 '73.0.3681.2',
731 '73.0.3681.1',
732 '73.0.3681.0',
733 '72.0.3626.73',
734 '71.0.3578.139',
735 '72.0.3626.72',
736 '72.0.3626.71',
737 '73.0.3680.1',
738 '73.0.3680.0',
739 '72.0.3626.70',
740 '71.0.3578.138',
741 '73.0.3678.2',
742 '73.0.3679.1',
743 '73.0.3679.0',
744 '72.0.3626.69',
745 '71.0.3578.137',
746 '73.0.3678.1',
747 '73.0.3678.0',
748 '71.0.3578.136',
749 '73.0.3677.1',
750 '73.0.3677.0',
751 '72.0.3626.68',
752 '72.0.3626.67',
753 '71.0.3578.135',
754 '73.0.3676.1',
755 '73.0.3676.0',
756 '73.0.3674.2',
757 '72.0.3626.66',
758 '71.0.3578.134',
759 '73.0.3674.1',
760 '73.0.3674.0',
761 '72.0.3626.65',
762 '71.0.3578.133',
763 '73.0.3673.2',
764 '73.0.3673.1',
765 '73.0.3673.0',
766 '72.0.3626.64',
767 '71.0.3578.132',
768 '72.0.3626.63',
769 '72.0.3626.62',
770 '72.0.3626.61',
771 '72.0.3626.60',
772 '73.0.3672.1',
773 '73.0.3672.0',
774 '72.0.3626.59',
775 '71.0.3578.131',
776 '73.0.3671.3',
777 '73.0.3671.2',
778 '73.0.3671.1',
779 '73.0.3671.0',
780 '72.0.3626.58',
781 '71.0.3578.130',
782 '73.0.3670.1',
783 '73.0.3670.0',
784 '72.0.3626.57',
785 '71.0.3578.129',
786 '73.0.3669.1',
787 '73.0.3669.0',
788 '72.0.3626.56',
789 '71.0.3578.128',
790 '73.0.3668.2',
791 '73.0.3668.1',
792 '73.0.3668.0',
793 '72.0.3626.55',
794 '71.0.3578.127',
795 '73.0.3667.2',
796 '73.0.3667.1',
797 '73.0.3667.0',
798 '72.0.3626.54',
799 '71.0.3578.126',
800 '73.0.3666.1',
801 '73.0.3666.0',
802 '72.0.3626.53',
803 '71.0.3578.125',
804 '73.0.3665.4',
805 '73.0.3665.3',
806 '72.0.3626.52',
807 '73.0.3665.2',
808 '73.0.3664.4',
809 '73.0.3665.1',
810 '73.0.3665.0',
811 '72.0.3626.51',
812 '71.0.3578.124',
813 '72.0.3626.50',
814 '73.0.3664.3',
815 '73.0.3664.2',
816 '73.0.3664.1',
817 '73.0.3664.0',
818 '73.0.3663.2',
819 '72.0.3626.49',
820 '71.0.3578.123',
821 '73.0.3663.1',
822 '73.0.3663.0',
823 '72.0.3626.48',
824 '71.0.3578.122',
825 '73.0.3662.1',
826 '73.0.3662.0',
827 '72.0.3626.47',
828 '71.0.3578.121',
829 '73.0.3661.1',
830 '72.0.3626.46',
831 '73.0.3661.0',
832 '72.0.3626.45',
833 '71.0.3578.120',
834 '73.0.3660.2',
835 '73.0.3660.1',
836 '73.0.3660.0',
837 '72.0.3626.44',
838 '71.0.3578.119',
839 '73.0.3659.1',
840 '73.0.3659.0',
841 '72.0.3626.43',
842 '71.0.3578.118',
843 '73.0.3658.1',
844 '73.0.3658.0',
845 '72.0.3626.42',
846 '71.0.3578.117',
847 '73.0.3657.1',
848 '73.0.3657.0',
849 '72.0.3626.41',
850 '71.0.3578.116',
851 '73.0.3656.1',
852 '73.0.3656.0',
853 '72.0.3626.40',
854 '71.0.3578.115',
855 '73.0.3655.1',
856 '73.0.3655.0',
857 '72.0.3626.39',
858 '71.0.3578.114',
859 '73.0.3654.1',
860 '73.0.3654.0',
861 '72.0.3626.38',
862 '71.0.3578.113',
863 '73.0.3653.1',
864 '73.0.3653.0',
865 '72.0.3626.37',
866 '71.0.3578.112',
867 '73.0.3652.1',
868 '73.0.3652.0',
869 '72.0.3626.36',
870 '71.0.3578.111',
871 '73.0.3651.1',
872 '73.0.3651.0',
873 '72.0.3626.35',
874 '71.0.3578.110',
875 '73.0.3650.1',
876 '73.0.3650.0',
877 '72.0.3626.34',
878 '71.0.3578.109',
879 '73.0.3649.1',
880 '73.0.3649.0',
881 '72.0.3626.33',
882 '71.0.3578.108',
883 '73.0.3648.2',
884 '73.0.3648.1',
885 '73.0.3648.0',
886 '72.0.3626.32',
887 '71.0.3578.107',
888 '73.0.3647.2',
889 '73.0.3647.1',
890 '73.0.3647.0',
891 '72.0.3626.31',
892 '71.0.3578.106',
893 '73.0.3635.3',
894 '73.0.3646.2',
895 '73.0.3646.1',
896 '73.0.3646.0',
897 '72.0.3626.30',
898 '71.0.3578.105',
899 '72.0.3626.29',
900 '73.0.3645.2',
901 '73.0.3645.1',
902 '73.0.3645.0',
903 '72.0.3626.28',
904 '71.0.3578.104',
905 '72.0.3626.27',
906 '72.0.3626.26',
907 '72.0.3626.25',
908 '72.0.3626.24',
909 '73.0.3644.0',
910 '73.0.3643.2',
911 '72.0.3626.23',
912 '71.0.3578.103',
913 '73.0.3643.1',
914 '73.0.3643.0',
915 '72.0.3626.22',
916 '71.0.3578.102',
917 '73.0.3642.1',
918 '73.0.3642.0',
919 '72.0.3626.21',
920 '71.0.3578.101',
921 '73.0.3641.1',
922 '73.0.3641.0',
923 '72.0.3626.20',
924 '71.0.3578.100',
925 '72.0.3626.19',
926 '73.0.3640.1',
927 '73.0.3640.0',
928 '72.0.3626.18',
929 '73.0.3639.1',
930 '71.0.3578.99',
931 '73.0.3639.0',
932 '72.0.3626.17',
933 '73.0.3638.2',
934 '72.0.3626.16',
935 '73.0.3638.1',
936 '73.0.3638.0',
937 '72.0.3626.15',
938 '71.0.3578.98',
939 '73.0.3635.2',
940 '71.0.3578.97',
941 '73.0.3637.1',
942 '73.0.3637.0',
943 '72.0.3626.14',
944 '71.0.3578.96',
945 '71.0.3578.95',
946 '72.0.3626.13',
947 '71.0.3578.94',
948 '73.0.3636.2',
949 '71.0.3578.93',
950 '73.0.3636.1',
951 '73.0.3636.0',
952 '72.0.3626.12',
953 '71.0.3578.92',
954 '73.0.3635.1',
955 '73.0.3635.0',
956 '72.0.3626.11',
957 '71.0.3578.91',
958 '73.0.3634.2',
959 '73.0.3634.1',
960 '73.0.3634.0',
961 '72.0.3626.10',
962 '71.0.3578.90',
963 '71.0.3578.89',
964 '73.0.3633.2',
965 '73.0.3633.1',
966 '73.0.3633.0',
967 '72.0.3610.4',
968 '72.0.3626.9',
969 '71.0.3578.88',
970 '73.0.3632.5',
971 '73.0.3632.4',
972 '73.0.3632.3',
973 '73.0.3632.2',
974 '73.0.3632.1',
975 '73.0.3632.0',
976 '72.0.3626.8',
977 '71.0.3578.87',
978 '73.0.3631.2',
979 '73.0.3631.1',
980 '73.0.3631.0',
981 '72.0.3626.7',
982 '71.0.3578.86',
983 '72.0.3626.6',
984 '73.0.3630.1',
985 '73.0.3630.0',
986 '72.0.3626.5',
987 '71.0.3578.85',
988 '72.0.3626.4',
989 '73.0.3628.3',
990 '73.0.3628.2',
991 '73.0.3629.1',
992 '73.0.3629.0',
993 '72.0.3626.3',
994 '71.0.3578.84',
995 '73.0.3628.1',
996 '73.0.3628.0',
997 '71.0.3578.83',
998 '73.0.3627.1',
999 '73.0.3627.0',
1000 '72.0.3626.2',
1001 '71.0.3578.82',
1002 '71.0.3578.81',
1003 '71.0.3578.80',
1004 '72.0.3626.1',
1005 '72.0.3626.0',
1006 '71.0.3578.79',
1007 '70.0.3538.124',
1008 '71.0.3578.78',
1009 '72.0.3623.4',
1010 '72.0.3625.2',
1011 '72.0.3625.1',
1012 '72.0.3625.0',
1013 '71.0.3578.77',
1014 '70.0.3538.123',
1015 '72.0.3624.4',
1016 '72.0.3624.3',
1017 '72.0.3624.2',
1018 '71.0.3578.76',
1019 '72.0.3624.1',
1020 '72.0.3624.0',
1021 '72.0.3623.3',
1022 '71.0.3578.75',
1023 '70.0.3538.122',
1024 '71.0.3578.74',
1025 '72.0.3623.2',
1026 '72.0.3610.3',
1027 '72.0.3623.1',
1028 '72.0.3623.0',
1029 '72.0.3622.3',
1030 '72.0.3622.2',
1031 '71.0.3578.73',
1032 '70.0.3538.121',
1033 '72.0.3622.1',
1034 '72.0.3622.0',
1035 '71.0.3578.72',
1036 '70.0.3538.120',
1037 '72.0.3621.1',
1038 '72.0.3621.0',
1039 '71.0.3578.71',
1040 '70.0.3538.119',
1041 '72.0.3620.1',
1042 '72.0.3620.0',
1043 '71.0.3578.70',
1044 '70.0.3538.118',
1045 '71.0.3578.69',
1046 '72.0.3619.1',
1047 '72.0.3619.0',
1048 '71.0.3578.68',
1049 '70.0.3538.117',
1050 '71.0.3578.67',
1051 '72.0.3618.1',
1052 '72.0.3618.0',
1053 '71.0.3578.66',
1054 '70.0.3538.116',
1055 '72.0.3617.1',
1056 '72.0.3617.0',
1057 '71.0.3578.65',
1058 '70.0.3538.115',
1059 '72.0.3602.3',
1060 '71.0.3578.64',
1061 '72.0.3616.1',
1062 '72.0.3616.0',
1063 '71.0.3578.63',
1064 '70.0.3538.114',
1065 '71.0.3578.62',
1066 '72.0.3615.1',
1067 '72.0.3615.0',
1068 '71.0.3578.61',
1069 '70.0.3538.113',
1070 '72.0.3614.1',
1071 '72.0.3614.0',
1072 '71.0.3578.60',
1073 '70.0.3538.112',
1074 '72.0.3613.1',
1075 '72.0.3613.0',
1076 '71.0.3578.59',
1077 '70.0.3538.111',
1078 '72.0.3612.2',
1079 '72.0.3612.1',
1080 '72.0.3612.0',
1081 '70.0.3538.110',
1082 '71.0.3578.58',
1083 '70.0.3538.109',
1084 '72.0.3611.2',
1085 '72.0.3611.1',
1086 '72.0.3611.0',
1087 '71.0.3578.57',
1088 '70.0.3538.108',
1089 '72.0.3610.2',
1090 '71.0.3578.56',
1091 '71.0.3578.55',
1092 '72.0.3610.1',
1093 '72.0.3610.0',
1094 '71.0.3578.54',
1095 '70.0.3538.107',
1096 '71.0.3578.53',
1097 '72.0.3609.3',
1098 '71.0.3578.52',
1099 '72.0.3609.2',
1100 '71.0.3578.51',
1101 '72.0.3608.5',
1102 '72.0.3609.1',
1103 '72.0.3609.0',
1104 '71.0.3578.50',
1105 '70.0.3538.106',
1106 '72.0.3608.4',
1107 '72.0.3608.3',
1108 '72.0.3608.2',
1109 '71.0.3578.49',
1110 '72.0.3608.1',
1111 '72.0.3608.0',
1112 '70.0.3538.105',
1113 '71.0.3578.48',
1114 '72.0.3607.1',
1115 '72.0.3607.0',
1116 '71.0.3578.47',
1117 '70.0.3538.104',
1118 '72.0.3606.2',
1119 '72.0.3606.1',
1120 '72.0.3606.0',
1121 '71.0.3578.46',
1122 '70.0.3538.103',
1123 '70.0.3538.102',
1124 '72.0.3605.3',
1125 '72.0.3605.2',
1126 '72.0.3605.1',
1127 '72.0.3605.0',
1128 '71.0.3578.45',
1129 '70.0.3538.101',
1130 '71.0.3578.44',
1131 '71.0.3578.43',
1132 '70.0.3538.100',
1133 '70.0.3538.99',
1134 '71.0.3578.42',
1135 '72.0.3604.1',
1136 '72.0.3604.0',
1137 '71.0.3578.41',
1138 '70.0.3538.98',
1139 '71.0.3578.40',
1140 '72.0.3603.2',
1141 '72.0.3603.1',
1142 '72.0.3603.0',
1143 '71.0.3578.39',
1144 '70.0.3538.97',
1145 '72.0.3602.2',
1146 '71.0.3578.38',
1147 '71.0.3578.37',
1148 '72.0.3602.1',
1149 '72.0.3602.0',
1150 '71.0.3578.36',
1151 '70.0.3538.96',
1152 '72.0.3601.1',
1153 '72.0.3601.0',
1154 '71.0.3578.35',
1155 '70.0.3538.95',
1156 '72.0.3600.1',
1157 '72.0.3600.0',
1158 '71.0.3578.34',
1159 '70.0.3538.94',
1160 '72.0.3599.3',
1161 '72.0.3599.2',
1162 '72.0.3599.1',
1163 '72.0.3599.0',
1164 '71.0.3578.33',
1165 '70.0.3538.93',
1166 '72.0.3598.1',
1167 '72.0.3598.0',
1168 '71.0.3578.32',
1169 '70.0.3538.87',
1170 '72.0.3597.1',
1171 '72.0.3597.0',
1172 '72.0.3596.2',
1173 '71.0.3578.31',
1174 '70.0.3538.86',
1175 '71.0.3578.30',
1176 '71.0.3578.29',
1177 '72.0.3596.1',
1178 '72.0.3596.0',
1179 '71.0.3578.28',
1180 '70.0.3538.85',
1181 '72.0.3595.2',
1182 '72.0.3591.3',
1183 '72.0.3595.1',
1184 '72.0.3595.0',
1185 '71.0.3578.27',
1186 '70.0.3538.84',
1187 '72.0.3594.1',
1188 '72.0.3594.0',
1189 '71.0.3578.26',
1190 '70.0.3538.83',
1191 '72.0.3593.2',
1192 '72.0.3593.1',
1193 '72.0.3593.0',
1194 '71.0.3578.25',
1195 '70.0.3538.82',
1196 '72.0.3589.3',
1197 '72.0.3592.2',
1198 '72.0.3592.1',
1199 '72.0.3592.0',
1200 '71.0.3578.24',
1201 '72.0.3589.2',
1202 '70.0.3538.81',
1203 '70.0.3538.80',
1204 '72.0.3591.2',
1205 '72.0.3591.1',
1206 '72.0.3591.0',
1207 '71.0.3578.23',
1208 '70.0.3538.79',
1209 '71.0.3578.22',
1210 '72.0.3590.1',
1211 '72.0.3590.0',
1212 '71.0.3578.21',
1213 '70.0.3538.78',
1214 '70.0.3538.77',
1215 '72.0.3589.1',
1216 '72.0.3589.0',
1217 '71.0.3578.20',
1218 '70.0.3538.76',
1219 '71.0.3578.19',
1220 '70.0.3538.75',
1221 '72.0.3588.1',
1222 '72.0.3588.0',
1223 '71.0.3578.18',
1224 '70.0.3538.74',
1225 '72.0.3586.2',
1226 '72.0.3587.0',
1227 '71.0.3578.17',
1228 '70.0.3538.73',
1229 '72.0.3586.1',
1230 '72.0.3586.0',
1231 '71.0.3578.16',
1232 '70.0.3538.72',
1233 '72.0.3585.1',
1234 '72.0.3585.0',
1235 '71.0.3578.15',
1236 '70.0.3538.71',
1237 '71.0.3578.14',
1238 '72.0.3584.1',
1239 '72.0.3584.0',
1240 '71.0.3578.13',
1241 '70.0.3538.70',
1242 '72.0.3583.2',
1243 '71.0.3578.12',
1244 '72.0.3583.1',
1245 '72.0.3583.0',
1246 '71.0.3578.11',
1247 '70.0.3538.69',
1248 '71.0.3578.10',
1249 '72.0.3582.0',
1250 '72.0.3581.4',
1251 '71.0.3578.9',
1252 '70.0.3538.67',
1253 '72.0.3581.3',
1254 '72.0.3581.2',
1255 '72.0.3581.1',
1256 '72.0.3581.0',
1257 '71.0.3578.8',
1258 '70.0.3538.66',
1259 '72.0.3580.1',
1260 '72.0.3580.0',
1261 '71.0.3578.7',
1262 '70.0.3538.65',
1263 '71.0.3578.6',
1264 '72.0.3579.1',
1265 '72.0.3579.0',
1266 '71.0.3578.5',
1267 '70.0.3538.64',
1268 '71.0.3578.4',
1269 '71.0.3578.3',
1270 '71.0.3578.2',
1271 '71.0.3578.1',
1272 '71.0.3578.0',
1273 '70.0.3538.63',
1274 '69.0.3497.128',
1275 '70.0.3538.62',
1276 '70.0.3538.61',
1277 '70.0.3538.60',
1278 '70.0.3538.59',
1279 '71.0.3577.1',
1280 '71.0.3577.0',
1281 '70.0.3538.58',
1282 '69.0.3497.127',
1283 '71.0.3576.2',
1284 '71.0.3576.1',
1285 '71.0.3576.0',
1286 '70.0.3538.57',
1287 '70.0.3538.56',
1288 '71.0.3575.2',
1289 '70.0.3538.55',
1290 '69.0.3497.126',
1291 '70.0.3538.54',
1292 '71.0.3575.1',
1293 '71.0.3575.0',
1294 '71.0.3574.1',
1295 '71.0.3574.0',
1296 '70.0.3538.53',
1297 '69.0.3497.125',
1298 '70.0.3538.52',
1299 '71.0.3573.1',
1300 '71.0.3573.0',
1301 '70.0.3538.51',
1302 '69.0.3497.124',
1303 '71.0.3572.1',
1304 '71.0.3572.0',
1305 '70.0.3538.50',
1306 '69.0.3497.123',
1307 '71.0.3571.2',
1308 '70.0.3538.49',
1309 '69.0.3497.122',
1310 '71.0.3571.1',
1311 '71.0.3571.0',
1312 '70.0.3538.48',
1313 '69.0.3497.121',
1314 '71.0.3570.1',
1315 '71.0.3570.0',
1316 '70.0.3538.47',
1317 '69.0.3497.120',
1318 '71.0.3568.2',
1319 '71.0.3569.1',
1320 '71.0.3569.0',
1321 '70.0.3538.46',
1322 '69.0.3497.119',
1323 '70.0.3538.45',
1324 '71.0.3568.1',
1325 '71.0.3568.0',
1326 '70.0.3538.44',
1327 '69.0.3497.118',
1328 '70.0.3538.43',
1329 '70.0.3538.42',
1330 '71.0.3567.1',
1331 '71.0.3567.0',
1332 '70.0.3538.41',
1333 '69.0.3497.117',
1334 '71.0.3566.1',
1335 '71.0.3566.0',
1336 '70.0.3538.40',
1337 '69.0.3497.116',
1338 '71.0.3565.1',
1339 '71.0.3565.0',
1340 '70.0.3538.39',
1341 '69.0.3497.115',
1342 '71.0.3564.1',
1343 '71.0.3564.0',
1344 '70.0.3538.38',
1345 '69.0.3497.114',
1346 '71.0.3563.0',
1347 '71.0.3562.2',
1348 '70.0.3538.37',
1349 '69.0.3497.113',
1350 '70.0.3538.36',
1351 '70.0.3538.35',
1352 '71.0.3562.1',
1353 '71.0.3562.0',
1354 '70.0.3538.34',
1355 '69.0.3497.112',
1356 '70.0.3538.33',
1357 '71.0.3561.1',
1358 '71.0.3561.0',
1359 '70.0.3538.32',
1360 '69.0.3497.111',
1361 '71.0.3559.6',
1362 '71.0.3560.1',
1363 '71.0.3560.0',
1364 '71.0.3559.5',
1365 '71.0.3559.4',
1366 '70.0.3538.31',
1367 '69.0.3497.110',
1368 '71.0.3559.3',
1369 '70.0.3538.30',
1370 '69.0.3497.109',
1371 '71.0.3559.2',
1372 '71.0.3559.1',
1373 '71.0.3559.0',
1374 '70.0.3538.29',
1375 '69.0.3497.108',
1376 '71.0.3558.2',
1377 '71.0.3558.1',
1378 '71.0.3558.0',
1379 '70.0.3538.28',
1380 '69.0.3497.107',
1381 '71.0.3557.2',
1382 '71.0.3557.1',
1383 '71.0.3557.0',
1384 '70.0.3538.27',
1385 '69.0.3497.106',
1386 '71.0.3554.4',
1387 '70.0.3538.26',
1388 '71.0.3556.1',
1389 '71.0.3556.0',
1390 '70.0.3538.25',
1391 '71.0.3554.3',
1392 '69.0.3497.105',
1393 '71.0.3554.2',
1394 '70.0.3538.24',
1395 '69.0.3497.104',
1396 '71.0.3555.2',
1397 '70.0.3538.23',
1398 '71.0.3555.1',
1399 '71.0.3555.0',
1400 '70.0.3538.22',
1401 '69.0.3497.103',
1402 '71.0.3554.1',
1403 '71.0.3554.0',
1404 '70.0.3538.21',
1405 '69.0.3497.102',
1406 '71.0.3553.3',
1407 '70.0.3538.20',
1408 '69.0.3497.101',
1409 '71.0.3553.2',
1410 '69.0.3497.100',
1411 '71.0.3553.1',
1412 '71.0.3553.0',
1413 '70.0.3538.19',
1414 '69.0.3497.99',
1415 '69.0.3497.98',
1416 '69.0.3497.97',
1417 '71.0.3552.6',
1418 '71.0.3552.5',
1419 '71.0.3552.4',
1420 '71.0.3552.3',
1421 '71.0.3552.2',
1422 '71.0.3552.1',
1423 '71.0.3552.0',
1424 '70.0.3538.18',
1425 '69.0.3497.96',
1426 '71.0.3551.3',
1427 '71.0.3551.2',
1428 '71.0.3551.1',
1429 '71.0.3551.0',
1430 '70.0.3538.17',
1431 '69.0.3497.95',
1432 '71.0.3550.3',
1433 '71.0.3550.2',
1434 '71.0.3550.1',
1435 '71.0.3550.0',
1436 '70.0.3538.16',
1437 '69.0.3497.94',
1438 '71.0.3549.1',
1439 '71.0.3549.0',
1440 '70.0.3538.15',
1441 '69.0.3497.93',
1442 '69.0.3497.92',
1443 '71.0.3548.1',
1444 '71.0.3548.0',
1445 '70.0.3538.14',
1446 '69.0.3497.91',
1447 '71.0.3547.1',
1448 '71.0.3547.0',
1449 '70.0.3538.13',
1450 '69.0.3497.90',
1451 '71.0.3546.2',
1452 '69.0.3497.89',
1453 '71.0.3546.1',
1454 '71.0.3546.0',
1455 '70.0.3538.12',
1456 '69.0.3497.88',
1457 '71.0.3545.4',
1458 '71.0.3545.3',
1459 '71.0.3545.2',
1460 '71.0.3545.1',
1461 '71.0.3545.0',
1462 '70.0.3538.11',
1463 '69.0.3497.87',
1464 '71.0.3544.5',
1465 '71.0.3544.4',
1466 '71.0.3544.3',
1467 '71.0.3544.2',
1468 '71.0.3544.1',
1469 '71.0.3544.0',
1470 '69.0.3497.86',
1471 '70.0.3538.10',
1472 '69.0.3497.85',
1473 '70.0.3538.9',
1474 '69.0.3497.84',
1475 '71.0.3543.4',
1476 '70.0.3538.8',
1477 '71.0.3543.3',
1478 '71.0.3543.2',
1479 '71.0.3543.1',
1480 '71.0.3543.0',
1481 '70.0.3538.7',
1482 '69.0.3497.83',
1483 '71.0.3542.2',
1484 '71.0.3542.1',
1485 '71.0.3542.0',
1486 '70.0.3538.6',
1487 '69.0.3497.82',
1488 '69.0.3497.81',
1489 '71.0.3541.1',
1490 '71.0.3541.0',
1491 '70.0.3538.5',
1492 '69.0.3497.80',
1493 '71.0.3540.1',
1494 '71.0.3540.0',
1495 '70.0.3538.4',
1496 '69.0.3497.79',
1497 '70.0.3538.3',
1498 '71.0.3539.1',
1499 '71.0.3539.0',
1500 '69.0.3497.78',
1501 '68.0.3440.134',
1502 '69.0.3497.77',
1503 '70.0.3538.2',
1504 '70.0.3538.1',
1505 '70.0.3538.0',
1506 '69.0.3497.76',
1507 '68.0.3440.133',
1508 '69.0.3497.75',
1509 '70.0.3537.2',
1510 '70.0.3537.1',
1511 '70.0.3537.0',
1512 '69.0.3497.74',
1513 '68.0.3440.132',
1514 '70.0.3536.0',
1515 '70.0.3535.5',
1516 '70.0.3535.4',
1517 '70.0.3535.3',
1518 '69.0.3497.73',
1519 '68.0.3440.131',
1520 '70.0.3532.8',
1521 '70.0.3532.7',
1522 '69.0.3497.72',
1523 '69.0.3497.71',
1524 '70.0.3535.2',
1525 '70.0.3535.1',
1526 '70.0.3535.0',
1527 '69.0.3497.70',
1528 '68.0.3440.130',
1529 '69.0.3497.69',
1530 '68.0.3440.129',
1531 '70.0.3534.4',
1532 '70.0.3534.3',
1533 '70.0.3534.2',
1534 '70.0.3534.1',
1535 '70.0.3534.0',
1536 '69.0.3497.68',
1537 '68.0.3440.128',
1538 '70.0.3533.2',
1539 '70.0.3533.1',
1540 '70.0.3533.0',
1541 '69.0.3497.67',
1542 '68.0.3440.127',
1543 '70.0.3532.6',
1544 '70.0.3532.5',
1545 '70.0.3532.4',
1546 '69.0.3497.66',
1547 '68.0.3440.126',
1548 '70.0.3532.3',
1549 '70.0.3532.2',
1550 '70.0.3532.1',
1551 '69.0.3497.60',
1552 '69.0.3497.65',
1553 '69.0.3497.64',
1554 '70.0.3532.0',
1555 '70.0.3531.0',
1556 '70.0.3530.4',
1557 '70.0.3530.3',
1558 '70.0.3530.2',
1559 '69.0.3497.58',
1560 '68.0.3440.125',
1561 '69.0.3497.57',
1562 '69.0.3497.56',
1563 '69.0.3497.55',
1564 '69.0.3497.54',
1565 '70.0.3530.1',
1566 '70.0.3530.0',
1567 '69.0.3497.53',
1568 '68.0.3440.124',
1569 '69.0.3497.52',
1570 '70.0.3529.3',
1571 '70.0.3529.2',
1572 '70.0.3529.1',
1573 '70.0.3529.0',
1574 '69.0.3497.51',
1575 '70.0.3528.4',
1576 '68.0.3440.123',
1577 '70.0.3528.3',
1578 '70.0.3528.2',
1579 '70.0.3528.1',
1580 '70.0.3528.0',
1581 '69.0.3497.50',
1582 '68.0.3440.122',
1583 '70.0.3527.1',
1584 '70.0.3527.0',
1585 '69.0.3497.49',
1586 '68.0.3440.121',
1587 '70.0.3526.1',
1588 '70.0.3526.0',
1589 '68.0.3440.120',
1590 '69.0.3497.48',
1591 '69.0.3497.47',
1592 '68.0.3440.119',
1593 '68.0.3440.118',
1594 '70.0.3525.5',
1595 '70.0.3525.4',
1596 '70.0.3525.3',
1597 '68.0.3440.117',
1598 '69.0.3497.46',
1599 '70.0.3525.2',
1600 '70.0.3525.1',
1601 '70.0.3525.0',
1602 '69.0.3497.45',
1603 '68.0.3440.116',
1604 '70.0.3524.4',
1605 '70.0.3524.3',
1606 '69.0.3497.44',
1607 '70.0.3524.2',
1608 '70.0.3524.1',
1609 '70.0.3524.0',
1610 '70.0.3523.2',
1611 '69.0.3497.43',
1612 '68.0.3440.115',
1613 '70.0.3505.9',
1614 '69.0.3497.42',
1615 '70.0.3505.8',
1616 '70.0.3523.1',
1617 '70.0.3523.0',
1618 '69.0.3497.41',
1619 '68.0.3440.114',
1620 '70.0.3505.7',
1621 '69.0.3497.40',
1622 '70.0.3522.1',
1623 '70.0.3522.0',
1624 '70.0.3521.2',
1625 '69.0.3497.39',
1626 '68.0.3440.113',
1627 '70.0.3505.6',
1628 '70.0.3521.1',
1629 '70.0.3521.0',
1630 '69.0.3497.38',
1631 '68.0.3440.112',
1632 '70.0.3520.1',
1633 '70.0.3520.0',
1634 '69.0.3497.37',
1635 '68.0.3440.111',
1636 '70.0.3519.3',
1637 '70.0.3519.2',
1638 '70.0.3519.1',
1639 '70.0.3519.0',
1640 '69.0.3497.36',
1641 '68.0.3440.110',
1642 '70.0.3518.1',
1643 '70.0.3518.0',
1644 '69.0.3497.35',
1645 '69.0.3497.34',
1646 '68.0.3440.109',
1647 '70.0.3517.1',
1648 '70.0.3517.0',
1649 '69.0.3497.33',
1650 '68.0.3440.108',
1651 '69.0.3497.32',
1652 '70.0.3516.3',
1653 '70.0.3516.2',
1654 '70.0.3516.1',
1655 '70.0.3516.0',
1656 '69.0.3497.31',
1657 '68.0.3440.107',
1658 '70.0.3515.4',
1659 '68.0.3440.106',
1660 '70.0.3515.3',
1661 '70.0.3515.2',
1662 '70.0.3515.1',
1663 '70.0.3515.0',
1664 '69.0.3497.30',
1665 '68.0.3440.105',
1666 '68.0.3440.104',
1667 '70.0.3514.2',
1668 '70.0.3514.1',
1669 '70.0.3514.0',
1670 '69.0.3497.29',
1671 '68.0.3440.103',
1672 '70.0.3513.1',
1673 '70.0.3513.0',
1674 '69.0.3497.28',
1675 )
1676 return _USER_AGENT_TPL % random.choice(_CHROME_VERSIONS)
1677
1678
3e669f36 1679std_headers = {
f7a147e3 1680 'User-Agent': random_user_agent(),
59ae15a5
PH
1681 'Accept-Charset': 'ISO-8859-1,utf-8;q=0.7,*;q=0.7',
1682 'Accept': 'text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8',
1683 'Accept-Encoding': 'gzip, deflate',
1684 'Accept-Language': 'en-us,en;q=0.5',
3e669f36 1685}
f427df17 1686
5f6a1245 1687
fb37eb25
S
1688USER_AGENTS = {
1689 'Safari': 'Mozilla/5.0 (X11; Linux x86_64; rv:10.0) AppleWebKit/533.20.25 (KHTML, like Gecko) Version/5.0.4 Safari/533.20.27',
1690}
1691
1692
bf42a990
S
1693NO_DEFAULT = object()
1694
7105440c
YCH
1695ENGLISH_MONTH_NAMES = [
1696 'January', 'February', 'March', 'April', 'May', 'June',
1697 'July', 'August', 'September', 'October', 'November', 'December']
1698
f6717dec
S
1699MONTH_NAMES = {
1700 'en': ENGLISH_MONTH_NAMES,
1701 'fr': [
3e4185c3
S
1702 'janvier', 'février', 'mars', 'avril', 'mai', 'juin',
1703 'juillet', 'août', 'septembre', 'octobre', 'novembre', 'décembre'],
f6717dec 1704}
a942d6cb 1705
a7aaa398
S
1706KNOWN_EXTENSIONS = (
1707 'mp4', 'm4a', 'm4p', 'm4b', 'm4r', 'm4v', 'aac',
1708 'flv', 'f4v', 'f4a', 'f4b',
1709 'webm', 'ogg', 'ogv', 'oga', 'ogx', 'spx', 'opus',
1710 'mkv', 'mka', 'mk3d',
1711 'avi', 'divx',
1712 'mov',
1713 'asf', 'wmv', 'wma',
1714 '3gp', '3g2',
1715 'mp3',
1716 'flac',
1717 'ape',
1718 'wav',
1719 'f4f', 'f4m', 'm3u8', 'smil')
1720
c587cbb7 1721# needed for sanitizing filenames in restricted mode
c8827027 1722ACCENT_CHARS = dict(zip('ÂÃÄÀÁÅÆÇÈÉÊËÌÍÎÏÐÑÒÓÔÕÖŐØŒÙÚÛÜŰÝÞßàáâãäåæçèéêëìíîïðñòóôõöőøœùúûüűýþÿ',
fd35d8cd
JW
1723 itertools.chain('AAAAAA', ['AE'], 'CEEEEIIIIDNOOOOOOO', ['OE'], 'UUUUUY', ['TH', 'ss'],
1724 'aaaaaa', ['ae'], 'ceeeeiiiionooooooo', ['oe'], 'uuuuuy', ['th'], 'y')))
c587cbb7 1725
46f59e89
S
1726DATE_FORMATS = (
1727 '%d %B %Y',
1728 '%d %b %Y',
1729 '%B %d %Y',
cb655f34
S
1730 '%B %dst %Y',
1731 '%B %dnd %Y',
9d30c213 1732 '%B %drd %Y',
cb655f34 1733 '%B %dth %Y',
46f59e89 1734 '%b %d %Y',
cb655f34
S
1735 '%b %dst %Y',
1736 '%b %dnd %Y',
9d30c213 1737 '%b %drd %Y',
cb655f34 1738 '%b %dth %Y',
46f59e89
S
1739 '%b %dst %Y %I:%M',
1740 '%b %dnd %Y %I:%M',
9d30c213 1741 '%b %drd %Y %I:%M',
46f59e89
S
1742 '%b %dth %Y %I:%M',
1743 '%Y %m %d',
1744 '%Y-%m-%d',
bccdbd22 1745 '%Y.%m.%d.',
46f59e89 1746 '%Y/%m/%d',
81c13222 1747 '%Y/%m/%d %H:%M',
46f59e89 1748 '%Y/%m/%d %H:%M:%S',
1931a55e
THD
1749 '%Y%m%d%H%M',
1750 '%Y%m%d%H%M%S',
0c1c6f4b 1751 '%Y-%m-%d %H:%M',
46f59e89
S
1752 '%Y-%m-%d %H:%M:%S',
1753 '%Y-%m-%d %H:%M:%S.%f',
5014558a 1754 '%Y-%m-%d %H:%M:%S:%f',
46f59e89
S
1755 '%d.%m.%Y %H:%M',
1756 '%d.%m.%Y %H.%M',
1757 '%Y-%m-%dT%H:%M:%SZ',
1758 '%Y-%m-%dT%H:%M:%S.%fZ',
1759 '%Y-%m-%dT%H:%M:%S.%f0Z',
1760 '%Y-%m-%dT%H:%M:%S',
1761 '%Y-%m-%dT%H:%M:%S.%f',
1762 '%Y-%m-%dT%H:%M',
c6eed6b8
S
1763 '%b %d %Y at %H:%M',
1764 '%b %d %Y at %H:%M:%S',
b555ae9b
S
1765 '%B %d %Y at %H:%M',
1766 '%B %d %Y at %H:%M:%S',
a63d9bd0 1767 '%H:%M %d-%b-%Y',
46f59e89
S
1768)
1769
1770DATE_FORMATS_DAY_FIRST = list(DATE_FORMATS)
1771DATE_FORMATS_DAY_FIRST.extend([
1772 '%d-%m-%Y',
1773 '%d.%m.%Y',
1774 '%d.%m.%y',
1775 '%d/%m/%Y',
1776 '%d/%m/%y',
1777 '%d/%m/%Y %H:%M:%S',
1778])
1779
1780DATE_FORMATS_MONTH_FIRST = list(DATE_FORMATS)
1781DATE_FORMATS_MONTH_FIRST.extend([
1782 '%m-%d-%Y',
1783 '%m.%d.%Y',
1784 '%m/%d/%Y',
1785 '%m/%d/%y',
1786 '%m/%d/%Y %H:%M:%S',
1787])
1788
06b3fe29 1789PACKED_CODES_RE = r"}\('(.+)',(\d+),(\d+),'([^']+)'\.split\('\|'\)"
22f5f5c6 1790JSON_LD_RE = r'(?is)<script[^>]+type=(["\']?)application/ld\+json\1[^>]*>(?P<json_ld>.+?)</script>'
06b3fe29 1791
7105440c 1792
d77c3dfd 1793def preferredencoding():
59ae15a5 1794 """Get preferred encoding.
d77c3dfd 1795
59ae15a5
PH
1796 Returns the best encoding scheme for the system, based on
1797 locale.getpreferredencoding() and some further tweaks.
1798 """
1799 try:
1800 pref = locale.getpreferredencoding()
28e614de 1801 'TEST'.encode(pref)
70a1165b 1802 except Exception:
59ae15a5 1803 pref = 'UTF-8'
bae611f2 1804
59ae15a5 1805 return pref
d77c3dfd 1806
f4bfd65f 1807
181c8655 1808def write_json_file(obj, fn):
1394646a 1809 """ Encode obj as JSON and write it to fn, atomically if possible """
181c8655 1810
92120217 1811 fn = encodeFilename(fn)
61ee5aeb 1812 if sys.version_info < (3, 0) and sys.platform != 'win32':
ec5f6016
JMF
1813 encoding = get_filesystem_encoding()
1814 # os.path.basename returns a bytes object, but NamedTemporaryFile
 1815 # will fail if the filename contains non-ASCII characters unless we
1816 # use a unicode object
1817 path_basename = lambda f: os.path.basename(fn).decode(encoding)
1818 # the same for os.path.dirname
1819 path_dirname = lambda f: os.path.dirname(fn).decode(encoding)
1820 else:
1821 path_basename = os.path.basename
1822 path_dirname = os.path.dirname
1823
73159f99
S
1824 args = {
1825 'suffix': '.tmp',
ec5f6016
JMF
1826 'prefix': path_basename(fn) + '.',
1827 'dir': path_dirname(fn),
73159f99
S
1828 'delete': False,
1829 }
1830
181c8655
PH
1831 # In Python 2.x, json.dump expects a bytestream.
1832 # In Python 3.x, it writes to a character stream
1833 if sys.version_info < (3, 0):
73159f99 1834 args['mode'] = 'wb'
181c8655 1835 else:
73159f99
S
1836 args.update({
1837 'mode': 'w',
1838 'encoding': 'utf-8',
1839 })
1840
c86b6142 1841 tf = tempfile.NamedTemporaryFile(**compat_kwargs(args))
181c8655
PH
1842
1843 try:
1844 with tf:
6e84b215 1845 json.dump(obj, tf)
1394646a
IK
1846 if sys.platform == 'win32':
1847 # Need to remove existing file on Windows, else os.rename raises
1848 # WindowsError or FileExistsError.
1849 try:
1850 os.unlink(fn)
1851 except OSError:
1852 pass
9cd5f54e
R
1853 try:
1854 mask = os.umask(0)
1855 os.umask(mask)
1856 os.chmod(tf.name, 0o666 & ~mask)
1857 except OSError:
1858 pass
181c8655 1859 os.rename(tf.name, fn)
70a1165b 1860 except Exception:
181c8655
PH
1861 try:
1862 os.remove(tf.name)
1863 except OSError:
1864 pass
1865 raise
1866
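# Illustrative usage (not part of the original file; the filename is made up):
# write_json_file({'id': 'abc', 'title': 'Example'}, 'info.json')
# writes to a temporary 'info.json.*.tmp' file in the same directory and then
# renames it over 'info.json', so readers never observe a partially written file.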
1867
1868if sys.version_info >= (2, 7):
ee114368 1869 def find_xpath_attr(node, xpath, key, val=None):
59ae56fa 1870 """ Find the xpath xpath[@key=val] """
5d2354f1 1871 assert re.match(r'^[a-zA-Z_-]+$', key)
ee114368 1872 expr = xpath + ('[@%s]' % key if val is None else "[@%s='%s']" % (key, val))
59ae56fa
PH
1873 return node.find(expr)
1874else:
ee114368 1875 def find_xpath_attr(node, xpath, key, val=None):
810c10ba 1876 for f in node.findall(compat_xpath(xpath)):
ee114368
S
1877 if key not in f.attrib:
1878 continue
1879 if val is None or f.attrib.get(key) == val:
59ae56fa
PH
1880 return f
1881 return None
1882
d7e66d39
JMF
 1883# On Python 2.6, the xml.etree.ElementTree.Element methods don't support
1884# the namespace parameter
5f6a1245
JW
1885
1886
d7e66d39
JMF
1887def xpath_with_ns(path, ns_map):
1888 components = [c.split(':') for c in path.split('/')]
1889 replaced = []
1890 for c in components:
1891 if len(c) == 1:
1892 replaced.append(c[0])
1893 else:
1894 ns, tag = c
1895 replaced.append('{%s}%s' % (ns_map[ns], tag))
1896 return '/'.join(replaced)
1897
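# Illustrative usage (not part of the original file; the namespace mapping is an example):
# >>> xpath_with_ns('media:thumbnail/media:url', {'media': 'http://search.yahoo.com/mrss/'})
# '{http://search.yahoo.com/mrss/}thumbnail/{http://search.yahoo.com/mrss/}url'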
d77c3dfd 1898
a41fb80c 1899def xpath_element(node, xpath, name=None, fatal=False, default=NO_DEFAULT):
578c0745 1900 def _find_xpath(xpath):
810c10ba 1901 return node.find(compat_xpath(xpath))
578c0745
S
1902
1903 if isinstance(xpath, (str, compat_str)):
1904 n = _find_xpath(xpath)
1905 else:
1906 for xp in xpath:
1907 n = _find_xpath(xp)
1908 if n is not None:
1909 break
d74bebd5 1910
8e636da4 1911 if n is None:
bf42a990
S
1912 if default is not NO_DEFAULT:
1913 return default
1914 elif fatal:
bf0ff932
PH
1915 name = xpath if name is None else name
1916 raise ExtractorError('Could not find XML element %s' % name)
1917 else:
1918 return None
a41fb80c
S
1919 return n
1920
1921
1922def xpath_text(node, xpath, name=None, fatal=False, default=NO_DEFAULT):
8e636da4
S
1923 n = xpath_element(node, xpath, name, fatal=fatal, default=default)
1924 if n is None or n == default:
1925 return n
1926 if n.text is None:
1927 if default is not NO_DEFAULT:
1928 return default
1929 elif fatal:
1930 name = xpath if name is None else name
1931 raise ExtractorError('Could not find XML element\'s text %s' % name)
1932 else:
1933 return None
1934 return n.text
a41fb80c
S
1935
1936
1937def xpath_attr(node, xpath, key, name=None, fatal=False, default=NO_DEFAULT):
1938 n = find_xpath_attr(node, xpath, key)
1939 if n is None:
1940 if default is not NO_DEFAULT:
1941 return default
1942 elif fatal:
1943 name = '%s[@%s]' % (xpath, key) if name is None else name
1944 raise ExtractorError('Could not find XML attribute %s' % name)
1945 else:
1946 return None
1947 return n.attrib[key]
bf0ff932
PH
1948
1949
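# Illustrative usage of the xpath helpers above (not part of the original file; the XML snippet is made up):
# >>> doc = compat_etree_fromstring('<root><a x="1">foo</a></root>')
# >>> xpath_text(doc, './a')
# 'foo'
# >>> xpath_attr(doc, './a', 'x')
# '1'
# >>> xpath_text(doc, './b', default=None) is None
# True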
9e6dd238 1950def get_element_by_id(id, html):
43e8fafd 1951 """Return the content of the tag with the specified ID in the passed HTML document"""
611c1dd9 1952 return get_element_by_attribute('id', id, html)
43e8fafd 1953
12ea2f30 1954
84c237fb 1955def get_element_by_class(class_name, html):
2af12ad9
TC
1956 """Return the content of the first tag with the specified class in the passed HTML document"""
1957 retval = get_elements_by_class(class_name, html)
1958 return retval[0] if retval else None
1959
1960
1961def get_element_by_attribute(attribute, value, html, escape_value=True):
1962 retval = get_elements_by_attribute(attribute, value, html, escape_value)
1963 return retval[0] if retval else None
1964
1965
1966def get_elements_by_class(class_name, html):
1967 """Return the content of all tags with the specified class in the passed HTML document as a list"""
1968 return get_elements_by_attribute(
84c237fb
YCH
1969 'class', r'[^\'"]*\b%s\b[^\'"]*' % re.escape(class_name),
1970 html, escape_value=False)
1971
1972
2af12ad9 1973def get_elements_by_attribute(attribute, value, html, escape_value=True):
43e8fafd 1974 """Return the content of the tag with the specified attribute in the passed HTML document"""
9e6dd238 1975
84c237fb
YCH
1976 value = re.escape(value) if escape_value else value
1977
2af12ad9
TC
1978 retlist = []
1979 for m in re.finditer(r'''(?xs)
38285056 1980 <([a-zA-Z0-9:._-]+)
609ff8ca 1981 (?:\s+[a-zA-Z0-9:._-]+(?:=[a-zA-Z0-9:._-]*|="[^"]*"|='[^']*'|))*?
38285056 1982 \s+%s=['"]?%s['"]?
609ff8ca 1983 (?:\s+[a-zA-Z0-9:._-]+(?:=[a-zA-Z0-9:._-]*|="[^"]*"|='[^']*'|))*?
38285056
PH
1984 \s*>
1985 (?P<content>.*?)
1986 </\1>
2af12ad9
TC
1987 ''' % (re.escape(attribute), value), html):
1988 res = m.group('content')
38285056 1989
2af12ad9
TC
1990 if res.startswith('"') or res.startswith("'"):
1991 res = res[1:-1]
38285056 1992
2af12ad9 1993 retlist.append(unescapeHTML(res))
a921f407 1994
2af12ad9 1995 return retlist
a921f407 1996
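# Illustrative usage (not part of the original file; the HTML snippets are made up):
# >>> get_element_by_id('main', '<div id="main">hello</div>')
# 'hello'
# >>> get_element_by_class('title', '<span class="big title">Some name</span>')
# 'Some name'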
c5229f39 1997
8bb56eee
BF
1998class HTMLAttributeParser(compat_HTMLParser):
1999 """Trivial HTML parser to gather the attributes for a single element"""
b6e0c7d2 2000
8bb56eee 2001 def __init__(self):
c5229f39 2002 self.attrs = {}
8bb56eee
BF
2003 compat_HTMLParser.__init__(self)
2004
2005 def handle_starttag(self, tag, attrs):
2006 self.attrs = dict(attrs)
2007
c5229f39 2008
8bb56eee
BF
2009def extract_attributes(html_element):
2010 """Given a string for an HTML element such as
2011 <el
2012 a="foo" B="bar" c="&98;az" d=boz
2013 empty= noval entity="&amp;"
2014 sq='"' dq="'"
2015 >
2016 Decode and return a dictionary of attributes.
2017 {
 2018 'a': 'foo', 'b': 'bar', 'c': 'baz', 'd': 'boz',
2019 'empty': '', 'noval': None, 'entity': '&',
2020 'sq': '"', 'dq': '\''
2021 }.
2022 NB HTMLParser is stricter in Python 2.6 & 3.2 than in later versions,
2023 but the cases in the unit test will work for all of 2.6, 2.7, 3.2-3.5.
2024 """
2025 parser = HTMLAttributeParser()
b4a3d461
S
2026 try:
2027 parser.feed(html_element)
2028 parser.close()
2029 # Older Python may throw HTMLParseError in case of malformed HTML
2030 except compat_HTMLParseError:
2031 pass
8bb56eee 2032 return parser.attrs
9e6dd238 2033
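# Illustrative usage (not part of the original file; the element string is made up):
# >>> extract_attributes('<a href="/watch?v=abc" class="yt-link" data-id=42>')
# {'href': '/watch?v=abc', 'class': 'yt-link', 'data-id': '42'}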
c5229f39 2034
9e6dd238 2035def clean_html(html):
59ae15a5 2036 """Clean an HTML snippet into a readable string"""
dd622d7c
PH
2037
2038 if html is None: # Convenience for sanitizing descriptions etc.
2039 return html
2040
59ae15a5
PH
2041 # Newline vs <br />
2042 html = html.replace('\n', ' ')
edd9221c
TF
2043 html = re.sub(r'(?u)\s*<\s*br\s*/?\s*>\s*', '\n', html)
2044 html = re.sub(r'(?u)<\s*/\s*p\s*>\s*<\s*p[^>]*>', '\n', html)
59ae15a5
PH
2045 # Strip html tags
2046 html = re.sub('<.*?>', '', html)
2047 # Replace html entities
2048 html = unescapeHTML(html)
7decf895 2049 return html.strip()
9e6dd238
FV
2050
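# Illustrative usage (not part of the original file):
# >>> clean_html('<p>First line<br/>second &amp; last</p>')
# 'First line\nsecond & last'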
2051
d77c3dfd 2052def sanitize_open(filename, open_mode):
59ae15a5
PH
2053 """Try to open the given filename, and slightly tweak it if this fails.
2054
2055 Attempts to open the given filename. If this fails, it tries to change
2056 the filename slightly, step by step, until it's either able to open it
2057 or it fails and raises a final exception, like the standard open()
2058 function.
2059
2060 It returns the tuple (stream, definitive_file_name).
2061 """
2062 try:
28e614de 2063 if filename == '-':
59ae15a5
PH
2064 if sys.platform == 'win32':
2065 import msvcrt
2066 msvcrt.setmode(sys.stdout.fileno(), os.O_BINARY)
898280a0 2067 return (sys.stdout.buffer if hasattr(sys.stdout, 'buffer') else sys.stdout, filename)
59ae15a5
PH
2068 stream = open(encodeFilename(filename), open_mode)
2069 return (stream, filename)
2070 except (IOError, OSError) as err:
f45c185f
PH
2071 if err.errno in (errno.EACCES,):
2072 raise
59ae15a5 2073
f45c185f 2074 # In case of error, try to remove win32 forbidden chars
d55de57b 2075 alt_filename = sanitize_path(filename)
f45c185f
PH
2076 if alt_filename == filename:
2077 raise
2078 else:
2079 # An exception here should be caught in the caller
d55de57b 2080 stream = open(encodeFilename(alt_filename), open_mode)
f45c185f 2081 return (stream, alt_filename)
d77c3dfd
FV
2082
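# Illustrative usage (not part of the original file; filenames are made up):
# stream, final_name = sanitize_open('output.part', 'wb')   # normally just open()
# sanitize_open('-', 'w')   # '-' selects standard output: returns (sys.stdout or its .buffer, '-')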
2083
2084def timeconvert(timestr):
59ae15a5
PH
2085 """Convert RFC 2822 defined time string into system timestamp"""
2086 timestamp = None
2087 timetuple = email.utils.parsedate_tz(timestr)
2088 if timetuple is not None:
2089 timestamp = email.utils.mktime_tz(timetuple)
2090 return timestamp
1c469a94 2091
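# Illustrative usage (not part of the original file): parses an RFC 2822 date into a POSIX timestamp.
# >>> timeconvert('Wed, 14 Oct 2015 07:28:00 GMT')
# 1444807680
# >>> timeconvert('not a date') is None
# True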
5f6a1245 2092
796173d0 2093def sanitize_filename(s, restricted=False, is_id=False):
59ae15a5
PH
2094 """Sanitizes a string so it could be used as part of a filename.
2095 If restricted is set, use a stricter subset of allowed characters.
158af524
S
2096 Set is_id if this is not an arbitrary string, but an ID that should be kept
2097 if possible.
59ae15a5
PH
2098 """
2099 def replace_insane(char):
c587cbb7
AT
2100 if restricted and char in ACCENT_CHARS:
2101 return ACCENT_CHARS[char]
91dd88b9 2102 elif not restricted and char == '\n':
2103 return ' '
2104 elif char == '?' or ord(char) < 32 or ord(char) == 127:
59ae15a5
PH
2105 return ''
2106 elif char == '"':
2107 return '' if restricted else '\''
2108 elif char == ':':
2109 return '_-' if restricted else ' -'
2110 elif char in '\\/|*<>':
2111 return '_'
627dcfff 2112 if restricted and (char in '!&\'()[]{}$;`^,#' or char.isspace()):
59ae15a5
PH
2113 return '_'
2114 if restricted and ord(char) > 127:
2115 return '_'
2116 return char
2117
639f1cea 2118 if s == '':
2119 return ''
2aeb06d6
PH
2120 # Handle timestamps
2121 s = re.sub(r'[0-9]+(?::[0-9]+)+', lambda m: m.group(0).replace(':', '_'), s)
28e614de 2122 result = ''.join(map(replace_insane, s))
796173d0
PH
2123 if not is_id:
2124 while '__' in result:
2125 result = result.replace('__', '_')
2126 result = result.strip('_')
2127 # Common case of "Foreign band name - English song title"
2128 if restricted and result.startswith('-_'):
2129 result = result[2:]
5a42414b
PH
2130 if result.startswith('-'):
2131 result = '_' + result[len('-'):]
a7440261 2132 result = result.lstrip('.')
796173d0
PH
2133 if not result:
2134 result = '_'
59ae15a5 2135 return result
d77c3dfd 2136
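# Illustrative usage (not part of the original file; hypothetical titles, results follow the rules above):
# >>> sanitize_filename('New: Movie? (2021)')
# 'New - Movie (2021)'
# >>> sanitize_filename('AC/DC: Live?', restricted=True)
# 'AC_DC_-_Live'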
5f6a1245 2137
c2934512 2138def sanitize_path(s, force=False):
a2aaf4db 2139 """Sanitizes and normalizes path on Windows"""
c2934512 2140 if sys.platform == 'win32':
c4218ac3 2141 force = False
c2934512 2142 drive_or_unc, _ = os.path.splitdrive(s)
2143 if sys.version_info < (2, 7) and not drive_or_unc:
2144 drive_or_unc, _ = os.path.splitunc(s)
2145 elif force:
2146 drive_or_unc = ''
2147 else:
a2aaf4db 2148 return s
c2934512 2149
be531ef1
S
2150 norm_path = os.path.normpath(remove_start(s, drive_or_unc)).split(os.path.sep)
2151 if drive_or_unc:
a2aaf4db
S
2152 norm_path.pop(0)
2153 sanitized_path = [
ec85ded8 2154 path_part if path_part in ['.', '..'] else re.sub(r'(?:[/<>:"\|\\?\*]|[\s.]$)', '#', path_part)
a2aaf4db 2155 for path_part in norm_path]
be531ef1
S
2156 if drive_or_unc:
2157 sanitized_path.insert(0, drive_or_unc + os.path.sep)
c4218ac3 2158 elif force and s[0] == os.path.sep:
2159 sanitized_path.insert(0, os.path.sep)
a2aaf4db
S
2160 return os.path.join(*sanitized_path)
2161
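# Illustrative behaviour (not part of the original file): on Windows, characters that are
# invalid in path components are replaced with '#'; on other platforms (without force=True)
# the path is returned unchanged.
# >>> sanitize_path(r'C:\foo|bar\baz?')   # on win32
# 'C:\\foo#bar\\baz#'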
2162
17bcc626 2163def sanitize_url(url):
befa4708
S
 2164 # Prepend protocol-less URLs with the `http:` scheme in order to reduce
 2165 # the number of unwanted failures caused by a missing protocol
2166 if url.startswith('//'):
2167 return 'http:%s' % url
2168 # Fix some common typos seen so far
2169 COMMON_TYPOS = (
067aa17e 2170 # https://github.com/ytdl-org/youtube-dl/issues/15649
befa4708
S
2171 (r'^httpss://', r'https://'),
2172 # https://bx1.be/lives/direct-tv/
2173 (r'^rmtp([es]?)://', r'rtmp\1://'),
2174 )
2175 for mistake, fixup in COMMON_TYPOS:
2176 if re.match(mistake, url):
2177 return re.sub(mistake, fixup, url)
bc6b9bcd 2178 return url
17bcc626
S
2179
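# Illustrative usage (not part of the original file; example URLs are hypothetical):
# >>> sanitize_url('//example.com/video')
# 'http://example.com/video'
# >>> sanitize_url('httpss://example.com/video')
# 'https://example.com/video'
# >>> sanitize_url('rmtpe://media.example.com/live')
# 'rtmpe://media.example.com/live'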
2180
5435dcf9
HH
2181def extract_basic_auth(url):
2182 parts = compat_urlparse.urlsplit(url)
2183 if parts.username is None:
2184 return url, None
2185 url = compat_urlparse.urlunsplit(parts._replace(netloc=(
2186 parts.hostname if parts.port is None
2187 else '%s:%d' % (parts.hostname, parts.port))))
2188 auth_payload = base64.b64encode(
2189 ('%s:%s' % (parts.username, parts.password or '')).encode('utf-8'))
2190 return url, 'Basic ' + auth_payload.decode('utf-8')
2191
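# Illustrative usage (not part of the original file; the credentials are made up):
# >>> extract_basic_auth('http://user:pass@example.com/path')
# ('http://example.com/path', 'Basic dXNlcjpwYXNz')
# >>> extract_basic_auth('http://example.com/path')
# ('http://example.com/path', None)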
2192
67dda517 2193def sanitized_Request(url, *args, **kwargs):
bc6b9bcd 2194 url, auth_header = extract_basic_auth(escape_url(sanitize_url(url)))
5435dcf9
HH
2195 if auth_header is not None:
2196 headers = args[1] if len(args) >= 2 else kwargs.setdefault('headers', {})
2197 headers['Authorization'] = auth_header
2198 return compat_urllib_request.Request(url, *args, **kwargs)
67dda517
S
2199
2200
51098426
S
2201def expand_path(s):
2202 """Expand shell variables and ~"""
2203 return os.path.expandvars(compat_expanduser(s))
2204
2205
d77c3dfd 2206def orderedSet(iterable):
59ae15a5
PH
2207 """ Remove all duplicates from the input iterable """
2208 res = []
2209 for el in iterable:
2210 if el not in res:
2211 res.append(el)
2212 return res
d77c3dfd 2213
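# Illustrative usage (not part of the original file; values follow from the code above):
# >>> orderedSet([1, 2, 1, 3, 2])
# [1, 2, 3]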
912b38b4 2214
55b2f099 2215def _htmlentity_transform(entity_with_semicolon):
4e408e47 2216 """Transforms an HTML entity to a character."""
55b2f099
YCH
2217 entity = entity_with_semicolon[:-1]
2218
4e408e47
PH
2219 # Known non-numeric HTML entity
2220 if entity in compat_html_entities.name2codepoint:
2221 return compat_chr(compat_html_entities.name2codepoint[entity])
2222
55b2f099
YCH
2223 # TODO: HTML5 allows entities without a semicolon. For example,
2224 # '&Eacuteric' should be decoded as 'Éric'.
2225 if entity_with_semicolon in compat_html_entities_html5:
2226 return compat_html_entities_html5[entity_with_semicolon]
2227
91757b0f 2228 mobj = re.match(r'#(x[0-9a-fA-F]+|[0-9]+)', entity)
4e408e47
PH
2229 if mobj is not None:
2230 numstr = mobj.group(1)
28e614de 2231 if numstr.startswith('x'):
4e408e47 2232 base = 16
28e614de 2233 numstr = '0%s' % numstr
4e408e47
PH
2234 else:
2235 base = 10
067aa17e 2236 # See https://github.com/ytdl-org/youtube-dl/issues/7518
7aefc49c
S
2237 try:
2238 return compat_chr(int(numstr, base))
2239 except ValueError:
2240 pass
4e408e47
PH
2241
2242 # Unknown entity in name, return its literal representation
7a3f0c00 2243 return '&%s;' % entity
4e408e47
PH
2244
2245
d77c3dfd 2246def unescapeHTML(s):
912b38b4
PH
2247 if s is None:
2248 return None
2249 assert type(s) == compat_str
d77c3dfd 2250
4e408e47 2251 return re.sub(
95f3f7c2 2252 r'&([^&;]+;)', lambda m: _htmlentity_transform(m.group(1)), s)
d77c3dfd 2253
8bf48f23 2254
cdb19aa4 2255def escapeHTML(text):
2256 return (
2257 text
2258 .replace('&', '&amp;')
2259 .replace('<', '&lt;')
2260 .replace('>', '&gt;')
2261 .replace('"', '&quot;')
2262 .replace("'", '&#39;')
2263 )
2264
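# Illustrative usage of the two helpers above (not part of the original file):
# >>> unescapeHTML('Tom &amp; Jerry &#39;s &eacute;pisode')
# "Tom & Jerry 's épisode"
# >>> escapeHTML('<a href="x">')
# '&lt;a href=&quot;x&quot;&gt;'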
2265
f5b1bca9 2266def process_communicate_or_kill(p, *args, **kwargs):
2267 try:
2268 return p.communicate(*args, **kwargs)
2269 except BaseException: # Including KeyboardInterrupt
2270 p.kill()
2271 p.wait()
2272 raise
2273
2274
aa49acd1
S
2275def get_subprocess_encoding():
2276 if sys.platform == 'win32' and sys.getwindowsversion()[0] >= 5:
2277 # For subprocess calls, encode with locale encoding
2278 # Refer to http://stackoverflow.com/a/9951851/35070
2279 encoding = preferredencoding()
2280 else:
2281 encoding = sys.getfilesystemencoding()
2282 if encoding is None:
2283 encoding = 'utf-8'
2284 return encoding
2285
2286
8bf48f23 2287def encodeFilename(s, for_subprocess=False):
59ae15a5
PH
2288 """
2289 @param s The name of the file
2290 """
d77c3dfd 2291
8bf48f23 2292 assert type(s) == compat_str
d77c3dfd 2293
59ae15a5
PH
2294 # Python 3 has a Unicode API
2295 if sys.version_info >= (3, 0):
2296 return s
0f00efed 2297
aa49acd1
S
2298 # Pass '' directly to use Unicode APIs on Windows 2000 and up
2299 # (Detecting Windows NT 4 is tricky because 'major >= 4' would
2300 # match Windows 9x series as well. Besides, NT 4 is obsolete.)
2301 if not for_subprocess and sys.platform == 'win32' and sys.getwindowsversion()[0] >= 5:
2302 return s
2303
8ee239e9
YCH
2304 # Jython assumes filenames are Unicode strings though reported as Python 2.x compatible
2305 if sys.platform.startswith('java'):
2306 return s
2307
aa49acd1
S
2308 return s.encode(get_subprocess_encoding(), 'ignore')
2309
2310
2311def decodeFilename(b, for_subprocess=False):
2312
2313 if sys.version_info >= (3, 0):
2314 return b
2315
2316 if not isinstance(b, bytes):
2317 return b
2318
2319 return b.decode(get_subprocess_encoding(), 'ignore')
8bf48f23 2320
f07b74fc
PH
2321
2322def encodeArgument(s):
2323 if not isinstance(s, compat_str):
2324 # Legacy code that uses byte strings
2325 # Uncomment the following line after fixing all post processors
7af808a5 2326 # assert False, 'Internal error: %r should be of type %r, is %r' % (s, compat_str, type(s))
f07b74fc
PH
2327 s = s.decode('ascii')
2328 return encodeFilename(s, True)
2329
2330
aa49acd1
S
2331def decodeArgument(b):
2332 return decodeFilename(b, True)
2333
2334
8271226a
PH
2335def decodeOption(optval):
2336 if optval is None:
2337 return optval
2338 if isinstance(optval, bytes):
2339 optval = optval.decode(preferredencoding())
2340
2341 assert isinstance(optval, compat_str)
2342 return optval
1c256f70 2343
5f6a1245 2344
cdb19aa4 2345def formatSeconds(secs, delim=':', msec=False):
4539dd30 2346 if secs > 3600:
cdb19aa4 2347 ret = '%d%s%02d%s%02d' % (secs // 3600, delim, (secs % 3600) // 60, delim, secs % 60)
4539dd30 2348 elif secs > 60:
cdb19aa4 2349 ret = '%d%s%02d' % (secs // 60, delim, secs % 60)
4539dd30 2350 else:
cdb19aa4 2351 ret = '%d' % secs
2352 return '%s.%03d' % (ret, secs % 1) if msec else ret
4539dd30 2353
a0ddb8a2 2354
77562778 2355def _ssl_load_windows_store_certs(ssl_context, storename):
2356 # Code adapted from _load_windows_store_certs in https://github.com/python/cpython/blob/main/Lib/ssl.py
2357 try:
2358 certs = [cert for cert, encoding, trust in ssl.enum_certificates(storename)
2359 if encoding == 'x509_asn' and (
2360 trust is True or ssl.Purpose.SERVER_AUTH.oid in trust)]
2361 except PermissionError:
2362 return
2363 for cert in certs:
a2366922 2364 try:
77562778 2365 ssl_context.load_verify_locations(cadata=cert)
2366 except ssl.SSLError:
a2366922
PH
2367 pass
2368
77562778 2369
2370def make_HTTPS_handler(params, **kwargs):
2371 opts_check_certificate = not params.get('nocheckcertificate')
2372 context = ssl.SSLContext(ssl.PROTOCOL_TLS_CLIENT)
2373 context.check_hostname = opts_check_certificate
2374 context.verify_mode = ssl.CERT_REQUIRED if opts_check_certificate else ssl.CERT_NONE
2375 if opts_check_certificate:
2376 # Work around the issue in load_default_certs when there are bad certificates. See:
2377 # https://github.com/yt-dlp/yt-dlp/issues/1060,
2378 # https://bugs.python.org/issue35665, https://bugs.python.org/issue4531
2379 if sys.platform == 'win32':
2380 for storename in ('CA', 'ROOT'):
2381 _ssl_load_windows_store_certs(context, storename)
303b479e 2382 context.set_default_verify_paths()
77562778 2383 return YoutubeDLHTTPSHandler(params, context=context, **kwargs)
ea6d901e 2384
732ea2f0 2385
5873d4cc 2386def bug_reports_message(before=';'):
08f2a92c 2387 if ytdl_is_updateable():
7a5c1cfe 2388 update_cmd = 'type yt-dlp -U to update'
08f2a92c 2389 else:
7a5c1cfe 2390 update_cmd = 'see https://github.com/yt-dlp/yt-dlp on how to update'
5873d4cc 2391 msg = 'please report this issue on https://github.com/yt-dlp/yt-dlp .'
08f2a92c 2392 msg += ' Make sure you are using the latest version; %s.' % update_cmd
7a5c1cfe 2393 msg += ' Be sure to call yt-dlp with the --verbose flag and include its complete output.'
5873d4cc
F
2394
2395 before = before.rstrip()
2396 if not before or before.endswith(('.', '!', '?')):
2397 msg = msg[0].title() + msg[1:]
2398
2399 return (before + ' ' if before else '') + msg
08f2a92c
JMF
2400
2401
bf5b9d85
PM
2402class YoutubeDLError(Exception):
2403 """Base exception for YoutubeDL errors."""
2404 pass
2405
2406
3158150c 2407network_exceptions = [compat_urllib_error.URLError, compat_http_client.HTTPException, socket.error]
2408if hasattr(ssl, 'CertificateError'):
2409 network_exceptions.append(ssl.CertificateError)
2410network_exceptions = tuple(network_exceptions)
2411
2412
bf5b9d85 2413class ExtractorError(YoutubeDLError):
1c256f70 2414 """Error during info extraction."""
5f6a1245 2415
1151c407 2416 def __init__(self, msg, tb=None, expected=False, cause=None, video_id=None, ie=None):
9a82b238 2417 """ tb, if given, is the original traceback (so that it can be printed out).
7a5c1cfe 2418 If expected is set, this is a normal error message and most likely not a bug in yt-dlp.
9a82b238 2419 """
3158150c 2420 if sys.exc_info()[0] in network_exceptions:
9a82b238 2421 expected = True
d5979c5d 2422
526d74ec 2423 self.msg = str(msg)
1c256f70 2424 self.traceback = tb
1151c407 2425 self.expected = expected
2eabb802 2426 self.cause = cause
d11271dd 2427 self.video_id = video_id
1151c407 2428 self.ie = ie
2429 self.exc_info = sys.exc_info() # preserve original exception
2430
2431 super(ExtractorError, self).__init__(''.join((
2432 format_field(ie, template='[%s] '),
2433 format_field(video_id, template='%s: '),
526d74ec 2434 self.msg,
1151c407 2435 format_field(cause, template=' (caused by %r)'),
2436 '' if expected else bug_reports_message())))
1c256f70 2437
01951dda
PH
2438 def format_traceback(self):
2439 if self.traceback is None:
2440 return None
28e614de 2441 return ''.join(traceback.format_tb(self.traceback))
01951dda 2442
1c256f70 2443
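# Illustrative sketch (not in the original module): shows how the arguments above are
# stored; the message prefix itself is rendered by format_field, defined elsewhere in
# this module, and reads roughly '[Youtube] abc123: Video unavailable' here.
def _extractor_error_example():
    err = ExtractorError('Video unavailable', video_id='abc123', ie='Youtube', expected=True)
    assert err.msg == 'Video unavailable' and err.video_id == 'abc123' and err.expected
    # Since expected=True, bug_reports_message() is not appended to the message.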
416c7fcb
PH
2444class UnsupportedError(ExtractorError):
2445 def __init__(self, url):
2446 super(UnsupportedError, self).__init__(
2447 'Unsupported URL: %s' % url, expected=True)
2448 self.url = url
2449
2450
55b3e45b
JMF
2451class RegexNotFoundError(ExtractorError):
2452 """Error when a regex didn't match"""
2453 pass
2454
2455
773f291d
S
2456class GeoRestrictedError(ExtractorError):
2457 """Geographic restriction Error exception.
2458
2459 This exception may be thrown when a video is not available from your
2460 geographic location due to geographic restrictions imposed by a website.
2461 """
b6e0c7d2 2462
773f291d
S
2463 def __init__(self, msg, countries=None):
2464 super(GeoRestrictedError, self).__init__(msg, expected=True)
2465 self.msg = msg
2466 self.countries = countries
2467
2468
bf5b9d85 2469class DownloadError(YoutubeDLError):
59ae15a5 2470 """Download Error exception.
d77c3dfd 2471
59ae15a5
PH
2472 This exception may be thrown by FileDownloader objects if they are not
2473 configured to continue on errors. They will contain the appropriate
2474 error message.
2475 """
5f6a1245 2476
8cc83b8d
FV
2477 def __init__(self, msg, exc_info=None):
2478 """ exc_info, if given, is the original exception that caused the trouble (as returned by sys.exc_info()). """
2479 super(DownloadError, self).__init__(msg)
2480 self.exc_info = exc_info
d77c3dfd
FV
2481
2482
498f5606 2483class EntryNotInPlaylist(YoutubeDLError):
2484 """Entry not in playlist exception.
2485
2486 This exception will be thrown by YoutubeDL when a requested entry
2487 is not found in the playlist info_dict
2488 """
2489 pass
2490
2491
bf5b9d85 2492class SameFileError(YoutubeDLError):
59ae15a5 2493 """Same File exception.
d77c3dfd 2494
59ae15a5
PH
2495 This exception will be thrown by FileDownloader objects if they detect
2496 multiple files would have to be downloaded to the same file on disk.
2497 """
2498 pass
d77c3dfd
FV
2499
2500
bf5b9d85 2501class PostProcessingError(YoutubeDLError):
59ae15a5 2502 """Post Processing exception.
d77c3dfd 2503
59ae15a5
PH
2504 This exception may be raised by PostProcessor's .run() method to
2505 indicate an error in the postprocessing task.
2506 """
5f6a1245 2507
7851b379 2508 def __init__(self, msg):
bf5b9d85 2509 super(PostProcessingError, self).__init__(msg)
7851b379 2510 self.msg = msg
d77c3dfd 2511
5f6a1245 2512
8b0d7497 2513class ExistingVideoReached(YoutubeDLError):
2514 """ --max-downloads limit has been reached. """
2515 pass
2516
2517
2518class RejectedVideoReached(YoutubeDLError):
2519 """ --max-downloads limit has been reached. """
2520 pass
2521
2522
51d9739f 2523class ThrottledDownload(YoutubeDLError):
2524 """ Download speed below --throttled-rate. """
2525 pass
2526
2527
bf5b9d85 2528class MaxDownloadsReached(YoutubeDLError):
59ae15a5
PH
2529 """ --max-downloads limit has been reached. """
2530 pass
d77c3dfd
FV
2531
2532
bf5b9d85 2533class UnavailableVideoError(YoutubeDLError):
59ae15a5 2534 """Unavailable Format exception.
d77c3dfd 2535
59ae15a5
PH
2536 This exception will be thrown when a video is requested
2537 in a format that is not available for that video.
2538 """
2539 pass
d77c3dfd
FV
2540
2541
bf5b9d85 2542class ContentTooShortError(YoutubeDLError):
59ae15a5 2543 """Content Too Short exception.
d77c3dfd 2544
59ae15a5
PH
2545 This exception may be raised by FileDownloader objects when a file they
2546 download is too small for what the server announced first, indicating
2547 the connection was probably interrupted.
2548 """
d77c3dfd 2549
59ae15a5 2550 def __init__(self, downloaded, expected):
bf5b9d85
PM
2551 super(ContentTooShortError, self).__init__(
2552 'Downloaded {0} bytes, expected {1} bytes'.format(downloaded, expected)
2553 )
2c7ed247 2554 # Both in bytes
59ae15a5
PH
2555 self.downloaded = downloaded
2556 self.expected = expected
d77c3dfd 2557
5f6a1245 2558
bf5b9d85 2559class XAttrMetadataError(YoutubeDLError):
efa97bdc
YCH
2560 def __init__(self, code=None, msg='Unknown error'):
2561 super(XAttrMetadataError, self).__init__(msg)
2562 self.code = code
bd264412 2563 self.msg = msg
efa97bdc
YCH
2564
2565 # Parsing code and msg
3089bc74 2566 if (self.code in (errno.ENOSPC, errno.EDQUOT)
a0566bbf 2567 or 'No space left' in self.msg or 'Disk quota exceeded' in self.msg):
efa97bdc
YCH
2568 self.reason = 'NO_SPACE'
2569 elif self.code == errno.E2BIG or 'Argument list too long' in self.msg:
2570 self.reason = 'VALUE_TOO_LONG'
2571 else:
2572 self.reason = 'NOT_SUPPORTED'
2573
2574
bf5b9d85 2575class XAttrUnavailableError(YoutubeDLError):
efa97bdc
YCH
2576 pass
2577
2578
c5a59d93 2579def _create_http_connection(ydl_handler, http_class, is_https, *args, **kwargs):
e5e78797
S
2580 # Working around python 2 bug (see http://bugs.python.org/issue17849) by limiting
2581 # expected HTTP responses to meet HTTP/1.0 or later (see also
067aa17e 2582 # https://github.com/ytdl-org/youtube-dl/issues/6727)
e5e78797 2583 if sys.version_info < (3, 0):
65220c3b
S
2584 kwargs['strict'] = True
2585 hc = http_class(*args, **compat_kwargs(kwargs))
be4a824d 2586 source_address = ydl_handler._params.get('source_address')
8959018a 2587
be4a824d 2588 if source_address is not None:
8959018a
AU
2589 # This is to workaround _create_connection() from socket where it will try all
2590 # address data from getaddrinfo() including IPv6. This filters the result from
2591 # getaddrinfo() based on the source_address value.
2592 # This is based on the cpython socket.create_connection() function.
2593 # https://github.com/python/cpython/blob/master/Lib/socket.py#L691
2594 def _create_connection(address, timeout=socket._GLOBAL_DEFAULT_TIMEOUT, source_address=None):
2595 host, port = address
2596 err = None
2597 addrs = socket.getaddrinfo(host, port, 0, socket.SOCK_STREAM)
9e21e6d9
S
2598 af = socket.AF_INET if '.' in source_address[0] else socket.AF_INET6
2599 ip_addrs = [addr for addr in addrs if addr[0] == af]
2600 if addrs and not ip_addrs:
2601 ip_version = 'v4' if af == socket.AF_INET else 'v6'
2602 raise socket.error(
2603 "No remote IP%s addresses available for connect, can't use '%s' as source address"
2604 % (ip_version, source_address[0]))
8959018a
AU
2605 for res in ip_addrs:
2606 af, socktype, proto, canonname, sa = res
2607 sock = None
2608 try:
2609 sock = socket.socket(af, socktype, proto)
2610 if timeout is not socket._GLOBAL_DEFAULT_TIMEOUT:
2611 sock.settimeout(timeout)
2612 sock.bind(source_address)
2613 sock.connect(sa)
2614 err = None # Explicitly break reference cycle
2615 return sock
2616 except socket.error as _:
2617 err = _
2618 if sock is not None:
2619 sock.close()
2620 if err is not None:
2621 raise err
2622 else:
9e21e6d9
S
2623 raise socket.error('getaddrinfo returns an empty list')
2624 if hasattr(hc, '_create_connection'):
2625 hc._create_connection = _create_connection
be4a824d
PH
2626 sa = (source_address, 0)
2627 if hasattr(hc, 'source_address'): # Python 2.7+
2628 hc.source_address = sa
2629 else: # Python 2.6
2630 def _hc_connect(self, *args, **kwargs):
9e21e6d9 2631 sock = _create_connection(
be4a824d
PH
2632 (self.host, self.port), self.timeout, sa)
2633 if is_https:
d7932313
PH
2634 self.sock = ssl.wrap_socket(
2635 sock, self.key_file, self.cert_file,
2636 ssl_version=ssl.PROTOCOL_TLSv1)
be4a824d
PH
2637 else:
2638 self.sock = sock
2639 hc.connect = functools.partial(_hc_connect, hc)
2640
2641 return hc
2642
2643
87f0e62d 2644def handle_youtubedl_headers(headers):
992fc9d6
YCH
2645 filtered_headers = headers
2646
2647 if 'Youtubedl-no-compression' in filtered_headers:
2648 filtered_headers = dict((k, v) for k, v in filtered_headers.items() if k.lower() != 'accept-encoding')
87f0e62d 2649 del filtered_headers['Youtubedl-no-compression']
87f0e62d 2650
992fc9d6 2651 return filtered_headers
87f0e62d
YCH
2652
2653
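# Illustrative sketch (not in the original module): the internal 'Youtubedl-no-compression'
# marker strips Accept-Encoding and is then dropped, as described in the YoutubeDLHandler
# docstring below. The header values are hypothetical.
def _handle_youtubedl_headers_example():
    headers = {
        'User-Agent': 'example-agent',
        'Accept-Encoding': 'gzip, deflate',
        'Youtubedl-no-compression': 'True',
    }
    assert handle_youtubedl_headers(headers) == {'User-Agent': 'example-agent'}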
acebc9cd 2654class YoutubeDLHandler(compat_urllib_request.HTTPHandler):
59ae15a5
PH
2655 """Handler for HTTP requests and responses.
2656
2657 This class, when installed with an OpenerDirector, automatically adds
2658 the standard headers to every HTTP request and handles gzipped and
2659 deflated responses from web servers. If compression is to be avoided in
2660 a particular request, the original request in the program code only has
0424ec30 2661 to include the HTTP header "Youtubedl-no-compression", which will be
59ae15a5
PH
2662 removed before making the real request.
2663
2664 Part of this code was copied from:
2665
2666 http://techknack.net/python-urllib2-handlers/
2667
2668 Andrew Rowls, the author of that code, agreed to release it to the
2669 public domain.
2670 """
2671
be4a824d
PH
2672 def __init__(self, params, *args, **kwargs):
2673 compat_urllib_request.HTTPHandler.__init__(self, *args, **kwargs)
2674 self._params = params
2675
2676 def http_open(self, req):
71aff188
YCH
2677 conn_class = compat_http_client.HTTPConnection
2678
2679 socks_proxy = req.headers.get('Ytdl-socks-proxy')
2680 if socks_proxy:
2681 conn_class = make_socks_conn_class(conn_class, socks_proxy)
2682 del req.headers['Ytdl-socks-proxy']
2683
be4a824d 2684 return self.do_open(functools.partial(
71aff188 2685 _create_http_connection, self, conn_class, False),
be4a824d
PH
2686 req)
2687
59ae15a5
PH
2688 @staticmethod
2689 def deflate(data):
fc2119f2 2690 if not data:
2691 return data
59ae15a5
PH
2692 try:
2693 return zlib.decompress(data, -zlib.MAX_WBITS)
2694 except zlib.error:
2695 return zlib.decompress(data)
2696
acebc9cd 2697 def http_request(self, req):
51f267d9
S
2698 # According to RFC 3986, URLs cannot contain non-ASCII characters; however, this is not
2699 # always respected by websites: some give out URLs with non-percent-encoded
2700 # non-ASCII characters (see telemb.py, ard.py [#3412])
2701 # urllib chokes on URLs with non-ASCII characters (see http://bugs.python.org/issue3991)
2702 # To work around aforementioned issue we will replace request's original URL with
2703 # percent-encoded one
2704 # Since redirects are also affected (e.g. http://www.southpark.de/alle-episoden/s18e09)
2705 # the code of this workaround has been moved here from YoutubeDL.urlopen()
2706 url = req.get_full_url()
2707 url_escaped = escape_url(url)
2708
2709 # Substitute URL if any change after escaping
2710 if url != url_escaped:
15d260eb 2711 req = update_Request(req, url=url_escaped)
51f267d9 2712
33ac271b 2713 for h, v in std_headers.items():
3d5f7a39
JK
2714 # Capitalize is needed because of Python bug 2275: http://bugs.python.org/issue2275
2715 # The dict keys are capitalized because of this bug by urllib
2716 if h.capitalize() not in req.headers:
33ac271b 2717 req.add_header(h, v)
87f0e62d
YCH
2718
2719 req.headers = handle_youtubedl_headers(req.headers)
989b4b2b
PH
2720
2721 if sys.version_info < (2, 7) and '#' in req.get_full_url():
2722 # Python 2.6 is brain-dead when it comes to fragments
2723 req._Request__original = req._Request__original.partition('#')[0]
2724 req._Request__r_type = req._Request__r_type.partition('#')[0]
2725
59ae15a5
PH
2726 return req
2727
acebc9cd 2728 def http_response(self, req, resp):
59ae15a5
PH
2729 old_resp = resp
2730 # gzip
2731 if resp.headers.get('Content-encoding', '') == 'gzip':
aa3e9507
PH
2732 content = resp.read()
2733 gz = gzip.GzipFile(fileobj=io.BytesIO(content), mode='rb')
2734 try:
2735 uncompressed = io.BytesIO(gz.read())
2736 except IOError as original_ioerror:
2737 # There may be junk at the end of the file
2738 # See http://stackoverflow.com/q/4928560/35070 for details
2739 for i in range(1, 1024):
2740 try:
2741 gz = gzip.GzipFile(fileobj=io.BytesIO(content[:-i]), mode='rb')
2742 uncompressed = io.BytesIO(gz.read())
2743 except IOError:
2744 continue
2745 break
2746 else:
2747 raise original_ioerror
b407d853 2748 resp = compat_urllib_request.addinfourl(uncompressed, old_resp.headers, old_resp.url, old_resp.code)
59ae15a5 2749 resp.msg = old_resp.msg
c047270c 2750 del resp.headers['Content-encoding']
59ae15a5
PH
2751 # deflate
2752 if resp.headers.get('Content-encoding', '') == 'deflate':
2753 gz = io.BytesIO(self.deflate(resp.read()))
b407d853 2754 resp = compat_urllib_request.addinfourl(gz, old_resp.headers, old_resp.url, old_resp.code)
59ae15a5 2755 resp.msg = old_resp.msg
c047270c 2756 del resp.headers['Content-encoding']
ad729172 2757 # Percent-encode redirect URL of Location HTTP header to satisfy RFC 3986 (see
067aa17e 2758 # https://github.com/ytdl-org/youtube-dl/issues/6457).
5a4d9ddb
S
2759 if 300 <= resp.code < 400:
2760 location = resp.headers.get('Location')
2761 if location:
2762 # As of RFC 2616 default charset is iso-8859-1 that is respected by python 3
2763 if sys.version_info >= (3, 0):
2764 location = location.encode('iso-8859-1').decode('utf-8')
0ea59007
YCH
2765 else:
2766 location = location.decode('utf-8')
5a4d9ddb
S
2767 location_escaped = escape_url(location)
2768 if location != location_escaped:
2769 del resp.headers['Location']
9a4aec8b
YCH
2770 if sys.version_info < (3, 0):
2771 location_escaped = location_escaped.encode('utf-8')
5a4d9ddb 2772 resp.headers['Location'] = location_escaped
59ae15a5 2773 return resp
0f8d03f8 2774
acebc9cd
PH
2775 https_request = http_request
2776 https_response = http_response
bf50b038 2777
5de90176 2778
71aff188
YCH
2779def make_socks_conn_class(base_class, socks_proxy):
2780 assert issubclass(base_class, (
2781 compat_http_client.HTTPConnection, compat_http_client.HTTPSConnection))
2782
2783 url_components = compat_urlparse.urlparse(socks_proxy)
2784 if url_components.scheme.lower() == 'socks5':
2785 socks_type = ProxyType.SOCKS5
2786 elif url_components.scheme.lower() in ('socks', 'socks4'):
2787 socks_type = ProxyType.SOCKS4
51fb4995
YCH
2788 elif url_components.scheme.lower() == 'socks4a':
2789 socks_type = ProxyType.SOCKS4A
71aff188 2790
cdd94c2e
YCH
2791 def unquote_if_non_empty(s):
2792 if not s:
2793 return s
2794 return compat_urllib_parse_unquote_plus(s)
2795
71aff188
YCH
2796 proxy_args = (
2797 socks_type,
2798 url_components.hostname, url_components.port or 1080,
2799 True, # Remote DNS
cdd94c2e
YCH
2800 unquote_if_non_empty(url_components.username),
2801 unquote_if_non_empty(url_components.password),
71aff188
YCH
2802 )
2803
2804 class SocksConnection(base_class):
2805 def connect(self):
2806 self.sock = sockssocket()
2807 self.sock.setproxy(*proxy_args)
2808 if type(self.timeout) in (int, float):
2809 self.sock.settimeout(self.timeout)
2810 self.sock.connect((self.host, self.port))
2811
2812 if isinstance(self, compat_http_client.HTTPSConnection):
2813 if hasattr(self, '_context'): # Python > 2.6
2814 self.sock = self._context.wrap_socket(
2815 self.sock, server_hostname=self.host)
2816 else:
2817 self.sock = ssl.wrap_socket(self.sock)
2818
2819 return SocksConnection
2820
2821
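# Illustrative sketch (not in the original module): the proxy URL scheme selects the
# SOCKS protocol version, and the result still subclasses the given connection class.
# The proxy address is hypothetical; nothing is connected here.
def _make_socks_conn_class_example():
    conn_class = make_socks_conn_class(
        compat_http_client.HTTPConnection, 'socks5://user:pass@127.0.0.1:1080')
    assert issubclass(conn_class, compat_http_client.HTTPConnection)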
be4a824d
PH
2822class YoutubeDLHTTPSHandler(compat_urllib_request.HTTPSHandler):
2823 def __init__(self, params, https_conn_class=None, *args, **kwargs):
2824 compat_urllib_request.HTTPSHandler.__init__(self, *args, **kwargs)
2825 self._https_conn_class = https_conn_class or compat_http_client.HTTPSConnection
2826 self._params = params
2827
2828 def https_open(self, req):
4f264c02 2829 kwargs = {}
71aff188
YCH
2830 conn_class = self._https_conn_class
2831
4f264c02
JMF
2832 if hasattr(self, '_context'): # python > 2.6
2833 kwargs['context'] = self._context
2834 if hasattr(self, '_check_hostname'): # python 3.x
2835 kwargs['check_hostname'] = self._check_hostname
71aff188
YCH
2836
2837 socks_proxy = req.headers.get('Ytdl-socks-proxy')
2838 if socks_proxy:
2839 conn_class = make_socks_conn_class(conn_class, socks_proxy)
2840 del req.headers['Ytdl-socks-proxy']
2841
be4a824d 2842 return self.do_open(functools.partial(
71aff188 2843 _create_http_connection, self, conn_class, True),
4f264c02 2844 req, **kwargs)
be4a824d
PH
2845
2846
1bab3437 2847class YoutubeDLCookieJar(compat_cookiejar.MozillaCookieJar):
f1a8511f
S
2848 """
2849 See [1] for cookie file format.
2850
2851 1. https://curl.haxx.se/docs/http-cookies.html
2852 """
e7e62441 2853 _HTTPONLY_PREFIX = '#HttpOnly_'
c380cc28
S
2854 _ENTRY_LEN = 7
2855 _HEADER = '''# Netscape HTTP Cookie File
7a5c1cfe 2856# This file is generated by yt-dlp. Do not edit.
c380cc28
S
2857
2858'''
2859 _CookieFileEntry = collections.namedtuple(
2860 'CookieFileEntry',
2861 ('domain_name', 'include_subdomains', 'path', 'https_only', 'expires_at', 'name', 'value'))
e7e62441 2862
1bab3437 2863 def save(self, filename=None, ignore_discard=False, ignore_expires=False):
c380cc28
S
2864 """
2865 Save cookies to a file.
2866
2867 Most of the code is taken from CPython 3.8 and slightly adapted
2868 to support cookie files with UTF-8 in both python 2 and 3.
2869 """
2870 if filename is None:
2871 if self.filename is not None:
2872 filename = self.filename
2873 else:
2874 raise ValueError(compat_cookiejar.MISSING_FILENAME_TEXT)
2875
1bab3437
S
2876 # Store session cookies with `expires` set to 0 instead of an empty
2877 # string
2878 for cookie in self:
2879 if cookie.expires is None:
2880 cookie.expires = 0
c380cc28
S
2881
2882 with io.open(filename, 'w', encoding='utf-8') as f:
2883 f.write(self._HEADER)
2884 now = time.time()
2885 for cookie in self:
2886 if not ignore_discard and cookie.discard:
2887 continue
2888 if not ignore_expires and cookie.is_expired(now):
2889 continue
2890 if cookie.secure:
2891 secure = 'TRUE'
2892 else:
2893 secure = 'FALSE'
2894 if cookie.domain.startswith('.'):
2895 initial_dot = 'TRUE'
2896 else:
2897 initial_dot = 'FALSE'
2898 if cookie.expires is not None:
2899 expires = compat_str(cookie.expires)
2900 else:
2901 expires = ''
2902 if cookie.value is None:
2903 # cookies.txt regards 'Set-Cookie: foo' as a cookie
2904 # with no name, whereas http.cookiejar regards it as a
2905 # cookie with no value.
2906 name = ''
2907 value = cookie.name
2908 else:
2909 name = cookie.name
2910 value = cookie.value
2911 f.write(
2912 '\t'.join([cookie.domain, initial_dot, cookie.path,
2913 secure, expires, name, value]) + '\n')
1bab3437
S
2914
2915 def load(self, filename=None, ignore_discard=False, ignore_expires=False):
e7e62441 2916 """Load cookies from a file."""
2917 if filename is None:
2918 if self.filename is not None:
2919 filename = self.filename
2920 else:
2921 raise ValueError(compat_cookiejar.MISSING_FILENAME_TEXT)
2922
c380cc28
S
2923 def prepare_line(line):
2924 if line.startswith(self._HTTPONLY_PREFIX):
2925 line = line[len(self._HTTPONLY_PREFIX):]
2926 # comments and empty lines are fine
2927 if line.startswith('#') or not line.strip():
2928 return line
2929 cookie_list = line.split('\t')
2930 if len(cookie_list) != self._ENTRY_LEN:
2931 raise compat_cookiejar.LoadError('invalid length %d' % len(cookie_list))
2932 cookie = self._CookieFileEntry(*cookie_list)
2933 if cookie.expires_at and not cookie.expires_at.isdigit():
2934 raise compat_cookiejar.LoadError('invalid expires at %s' % cookie.expires_at)
2935 return line
2936
e7e62441 2937 cf = io.StringIO()
c380cc28 2938 with io.open(filename, encoding='utf-8') as f:
e7e62441 2939 for line in f:
c380cc28
S
2940 try:
2941 cf.write(prepare_line(line))
2942 except compat_cookiejar.LoadError as e:
2943 write_string(
2944 'WARNING: skipping cookie file entry due to %s: %r\n'
2945 % (e, line), sys.stderr)
2946 continue
e7e62441 2947 cf.seek(0)
2948 self._really_load(cf, filename, ignore_discard, ignore_expires)
1bab3437
S
2949 # Session cookies are denoted by either `expires` field set to
2950 # an empty string or 0. MozillaCookieJar only recognizes the former
2951 # (see [1]). So we need to force the latter to be recognized as session
2952 # cookies on our own.
2953 # Session cookies may be important for cookies-based authentication,
2954 # e.g. usually, when user does not check 'Remember me' check box while
2955 # logging in on a site, some important cookies are stored as session
2956 # cookies so that not recognizing them will result in failed login.
2957 # 1. https://bugs.python.org/issue17164
2958 for cookie in self:
2959 # Treat `expires=0` cookies as session cookies
2960 if cookie.expires == 0:
2961 cookie.expires = None
2962 cookie.discard = True
2963
2964
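# Illustrative sketch (not in the original module): round-trips a session cookie
# (expires=None) through the Netscape format handled above. `path` is a caller-supplied
# temporary file name; the Cookie arguments follow the standard http.cookiejar signature.
def _cookiejar_roundtrip_example(path):
    jar = YoutubeDLCookieJar(path)
    jar.set_cookie(compat_cookiejar.Cookie(
        0, 'SID', 'value', None, False, '.example.com', True, True,
        '/', True, True, None, False, None, None, {}))
    jar.save(ignore_discard=True, ignore_expires=True)  # stored with expires set to 0
    loaded = YoutubeDLCookieJar(path)
    loaded.load(ignore_discard=True, ignore_expires=True)
    # expires=0 is mapped back to a session cookie (expires=None, discard=True)
    return [(c.name, c.expires, c.discard) for c in loaded]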
a6420bf5
S
2965class YoutubeDLCookieProcessor(compat_urllib_request.HTTPCookieProcessor):
2966 def __init__(self, cookiejar=None):
2967 compat_urllib_request.HTTPCookieProcessor.__init__(self, cookiejar)
2968
2969 def http_response(self, request, response):
2970 # Python 2 will choke on the next HTTP request if there are non-ASCII
2971 # characters in the Set-Cookie HTTP header of the last response (see
067aa17e 2972 # https://github.com/ytdl-org/youtube-dl/issues/6769).
a6420bf5
S
2973 # In order to at least prevent crashing we will percent encode Set-Cookie
2974 # header before HTTPCookieProcessor starts processing it.
e28034c5
S
2975 # if sys.version_info < (3, 0) and response.headers:
2976 # for set_cookie_header in ('Set-Cookie', 'Set-Cookie2'):
2977 # set_cookie = response.headers.get(set_cookie_header)
2978 # if set_cookie:
2979 # set_cookie_escaped = compat_urllib_parse.quote(set_cookie, b"%/;:@&=+$,!~*'()?#[] ")
2980 # if set_cookie != set_cookie_escaped:
2981 # del response.headers[set_cookie_header]
2982 # response.headers[set_cookie_header] = set_cookie_escaped
a6420bf5
S
2983 return compat_urllib_request.HTTPCookieProcessor.http_response(self, request, response)
2984
f5fa042c 2985 https_request = compat_urllib_request.HTTPCookieProcessor.http_request
a6420bf5
S
2986 https_response = http_response
2987
2988
fca6dba8 2989class YoutubeDLRedirectHandler(compat_urllib_request.HTTPRedirectHandler):
201c1459 2990 """YoutubeDL redirect handler
2991
2992 The code is based on HTTPRedirectHandler implementation from CPython [1].
2993
2994 This redirect handler solves two issues:
2995 - ensures redirect URL is always unicode under python 2
2996 - introduces support for experimental HTTP response status code
2997 308 Permanent Redirect [2] used by some sites [3]
2998
2999 1. https://github.com/python/cpython/blob/master/Lib/urllib/request.py
3000 2. https://developer.mozilla.org/en-US/docs/Web/HTTP/Status/308
3001 3. https://github.com/ytdl-org/youtube-dl/issues/28768
3002 """
3003
3004 http_error_301 = http_error_303 = http_error_307 = http_error_308 = compat_urllib_request.HTTPRedirectHandler.http_error_302
3005
3006 def redirect_request(self, req, fp, code, msg, headers, newurl):
3007 """Return a Request or None in response to a redirect.
3008
3009 This is called by the http_error_30x methods when a
3010 redirection response is received. If a redirection should
3011 take place, return a new Request to allow http_error_30x to
3012 perform the redirect. Otherwise, raise HTTPError if no-one
3013 else should try to handle this url. Return None if you can't
3014 but another Handler might.
3015 """
3016 m = req.get_method()
3017 if (not (code in (301, 302, 303, 307, 308) and m in ("GET", "HEAD")
3018 or code in (301, 302, 303) and m == "POST")):
3019 raise compat_HTTPError(req.full_url, code, msg, headers, fp)
3020 # Strictly (according to RFC 2616), 301 or 302 in response to
3021 # a POST MUST NOT cause a redirection without confirmation
3022 # from the user (of urllib.request, in this case). In practice,
3023 # essentially all clients do redirect in this case, so we do
3024 # the same.
3025
3026 # On python 2 urlh.geturl() may sometimes return the redirect URL
3027 # as a byte string instead of unicode. This workaround forces it
3028 # to always return unicode.
3029 if sys.version_info[0] < 3:
3030 newurl = compat_str(newurl)
3031
3032 # Be conciliant with URIs containing a space. This is mainly
3033 # redundant with the more complete encoding done in http_error_302(),
3034 # but it is kept for compatibility with other callers.
3035 newurl = newurl.replace(' ', '%20')
3036
3037 CONTENT_HEADERS = ("content-length", "content-type")
3038 # NB: don't use dict comprehension for python 2.6 compatibility
3039 newheaders = dict((k, v) for k, v in req.headers.items()
3040 if k.lower() not in CONTENT_HEADERS)
3041 return compat_urllib_request.Request(
3042 newurl, headers=newheaders, origin_req_host=req.origin_req_host,
3043 unverifiable=True)
fca6dba8
S
3044
3045
46f59e89
S
3046def extract_timezone(date_str):
3047 m = re.search(
f137e4c2 3048 r'''(?x)
3049 ^.{8,}? # >=8 char non-TZ prefix, if present
3050 (?P<tz>Z| # just the UTC Z, or
3051 (?:(?<=.\b\d{4}|\b\d{2}:\d\d)| # preceded by 4 digits or hh:mm or
3052 (?<!.\b[a-zA-Z]{3}|[a-zA-Z]{4}|..\b\d\d)) # not preceded by 3 alpha word or >= 4 alpha or 2 digits
3053 [ ]? # optional space
3054 (?P<sign>\+|-) # +/-
3055 (?P<hours>[0-9]{2}):?(?P<minutes>[0-9]{2}) # hh[:]mm
3056 $)
3057 ''', date_str)
46f59e89
S
3058 if not m:
3059 timezone = datetime.timedelta()
3060 else:
3061 date_str = date_str[:-len(m.group('tz'))]
3062 if not m.group('sign'):
3063 timezone = datetime.timedelta()
3064 else:
3065 sign = 1 if m.group('sign') == '+' else -1
3066 timezone = datetime.timedelta(
3067 hours=sign * int(m.group('hours')),
3068 minutes=sign * int(m.group('minutes')))
3069 return timezone, date_str
3070
3071
08b38d54 3072def parse_iso8601(date_str, delimiter='T', timezone=None):
912b38b4
PH
3073 """ Return a UNIX timestamp from the given date """
3074
3075 if date_str is None:
3076 return None
3077
52c3a6e4
S
3078 date_str = re.sub(r'\.[0-9]+', '', date_str)
3079
08b38d54 3080 if timezone is None:
46f59e89
S
3081 timezone, date_str = extract_timezone(date_str)
3082
52c3a6e4
S
3083 try:
3084 date_format = '%Y-%m-%d{0}%H:%M:%S'.format(delimiter)
3085 dt = datetime.datetime.strptime(date_str, date_format) - timezone
3086 return calendar.timegm(dt.timetuple())
3087 except ValueError:
3088 pass
912b38b4
PH
3089
3090
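# Illustrative sketch (not in the original module): timestamps whose timezone suffixes
# the extraction above normalizes; the expected epoch values mirror the upstream tests.
def _parse_iso8601_examples():
    assert parse_iso8601('2014-03-23T23:04:26+0100') == 1395612266
    assert parse_iso8601('2014-03-23T22:04:26+0000') == 1395612266
    assert parse_iso8601('2014-03-23T22:04:26.1234Z') == 1395612266  # fractional part is stripped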
46f59e89
S
3091def date_formats(day_first=True):
3092 return DATE_FORMATS_DAY_FIRST if day_first else DATE_FORMATS_MONTH_FIRST
3093
3094
42bdd9d0 3095def unified_strdate(date_str, day_first=True):
bf50b038 3096 """Return a string with the date in the format YYYYMMDD"""
64e7ad60
PH
3097
3098 if date_str is None:
3099 return None
bf50b038 3100 upload_date = None
5f6a1245 3101 # Replace commas
026fcc04 3102 date_str = date_str.replace(',', ' ')
42bdd9d0 3103 # Remove AM/PM + timezone
9bb8e0a3 3104 date_str = re.sub(r'(?i)\s*(?:AM|PM)(?:\s+[A-Z]+)?', '', date_str)
46f59e89 3105 _, date_str = extract_timezone(date_str)
42bdd9d0 3106
46f59e89 3107 for expression in date_formats(day_first):
bf50b038
JMF
3108 try:
3109 upload_date = datetime.datetime.strptime(date_str, expression).strftime('%Y%m%d')
5de90176 3110 except ValueError:
bf50b038 3111 pass
42393ce2
PH
3112 if upload_date is None:
3113 timetuple = email.utils.parsedate_tz(date_str)
3114 if timetuple:
c6b9cf05
S
3115 try:
3116 upload_date = datetime.datetime(*timetuple[:6]).strftime('%Y%m%d')
3117 except ValueError:
3118 pass
6a750402
JMF
3119 if upload_date is not None:
3120 return compat_str(upload_date)
bf50b038 3121
5f6a1245 3122
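# Illustrative sketch (not in the original module): a few of the spellings the
# normalization above accepts, all reduced to YYYYMMDD.
def _unified_strdate_examples():
    assert unified_strdate('December 21, 2010') == '20101221'
    assert unified_strdate('1968-12-10') == '19681210'
    assert unified_strdate('8/7/2009') == '20090708'  # day_first=True by default
    assert unified_strdate('8/7/2009', day_first=False) == '20090807'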
46f59e89
S
3123def unified_timestamp(date_str, day_first=True):
3124 if date_str is None:
3125 return None
3126
2ae2ffda 3127 date_str = re.sub(r'[,|]', '', date_str)
46f59e89 3128
7dc2a74e 3129 pm_delta = 12 if re.search(r'(?i)PM', date_str) else 0
46f59e89
S
3130 timezone, date_str = extract_timezone(date_str)
3131
3132 # Remove AM/PM + timezone
3133 date_str = re.sub(r'(?i)\s*(?:AM|PM)(?:\s+[A-Z]+)?', '', date_str)
3134
deef3195
S
3135 # Remove unrecognized timezones from ISO 8601 alike timestamps
3136 m = re.search(r'\d{1,2}:\d{1,2}(?:\.\d+)?(?P<tz>\s*[A-Z]+)$', date_str)
3137 if m:
3138 date_str = date_str[:-len(m.group('tz'))]
3139
f226880c
PH
3140 # Python only supports microseconds, so remove nanoseconds
3141 m = re.search(r'^([0-9]{4,}-[0-9]{1,2}-[0-9]{1,2}T[0-9]{1,2}:[0-9]{1,2}:[0-9]{1,2}\.[0-9]{6})[0-9]+$', date_str)
3142 if m:
3143 date_str = m.group(1)
3144
46f59e89
S
3145 for expression in date_formats(day_first):
3146 try:
7dc2a74e 3147 dt = datetime.datetime.strptime(date_str, expression) - timezone + datetime.timedelta(hours=pm_delta)
46f59e89
S
3148 return calendar.timegm(dt.timetuple())
3149 except ValueError:
3150 pass
3151 timetuple = email.utils.parsedate_tz(date_str)
3152 if timetuple:
7dc2a74e 3153 return calendar.timegm(timetuple) + pm_delta * 3600
46f59e89
S
3154
3155
28e614de 3156def determine_ext(url, default_ext='unknown_video'):
85750f89 3157 if url is None or '.' not in url:
f4776371 3158 return default_ext
9cb9a5df 3159 guess = url.partition('?')[0].rpartition('.')[2]
73e79f2a
PH
3160 if re.match(r'^[A-Za-z0-9]+$', guess):
3161 return guess
a7aaa398
S
3162 # Try to extract ext from URLs like http://example.com/foo/bar.mp4/?download
3163 elif guess.rstrip('/') in KNOWN_EXTENSIONS:
9cb9a5df 3164 return guess.rstrip('/')
73e79f2a 3165 else:
cbdbb766 3166 return default_ext
73e79f2a 3167
5f6a1245 3168
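# Illustrative sketch (not in the original module): the plain, trailing-slash and
# fallback branches above (the second relies on KNOWN_EXTENSIONS, defined further
# down in this module).
def _determine_ext_examples():
    assert determine_ext('http://example.com/foo/bar.mp4/?download') == 'mp4'
    assert determine_ext('http://example.com/foo/bar') == 'unknown_video'
    assert determine_ext(None, default_ext='mp4') == 'mp4'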
824fa511
S
3169def subtitles_filename(filename, sub_lang, sub_format, expected_real_ext=None):
3170 return replace_extension(filename, sub_lang + '.' + sub_format, expected_real_ext)
d4051a8e 3171
5f6a1245 3172
9e62f283 3173def datetime_from_str(date_str, precision='auto', format='%Y%m%d'):
37254abc
JMF
3174 """
3175 Return a datetime object from a string in the format YYYYMMDD or
9e62f283 3176 (now|today|date)[+-][0-9](microsecond|second|minute|hour|day|week|month|year)(s)?
3177
3178 format: string date format used to return datetime object from
3179 precision: round the time portion of a datetime object.
3180 auto|microsecond|second|minute|hour|day.
3181 auto: round to the unit provided in date_str (if applicable).
3182 """
3183 auto_precision = False
3184 if precision == 'auto':
3185 auto_precision = True
3186 precision = 'microsecond'
3187 today = datetime_round(datetime.datetime.now(), precision)
f8795e10 3188 if date_str in ('now', 'today'):
37254abc 3189 return today
f8795e10
PH
3190 if date_str == 'yesterday':
3191 return today - datetime.timedelta(days=1)
9e62f283 3192 match = re.match(
3193 r'(?P<start>.+)(?P<sign>[+-])(?P<time>\d+)(?P<unit>microsecond|second|minute|hour|day|week|month|year)(s)?',
3194 date_str)
37254abc 3195 if match is not None:
9e62f283 3196 start_time = datetime_from_str(match.group('start'), precision, format)
3197 time = int(match.group('time')) * (-1 if match.group('sign') == '-' else 1)
37254abc 3198 unit = match.group('unit')
9e62f283 3199 if unit == 'month' or unit == 'year':
3200 new_date = datetime_add_months(start_time, time * 12 if unit == 'year' else time)
37254abc 3201 unit = 'day'
9e62f283 3202 else:
3203 if unit == 'week':
3204 unit = 'day'
3205 time *= 7
3206 delta = datetime.timedelta(**{unit + 's': time})
3207 new_date = start_time + delta
3208 if auto_precision:
3209 return datetime_round(new_date, unit)
3210 return new_date
3211
3212 return datetime_round(datetime.datetime.strptime(date_str, format), precision)
3213
3214
3215def date_from_str(date_str, format='%Y%m%d'):
3216 """
3217 Return a datetime object from a string in the format YYYYMMDD or
3218 (now|today|date)[+-][0-9](microsecond|second|minute|hour|day|week|month|year)(s)?
3219
3220 format: string date format used to return datetime object from
3221 """
3222 return datetime_from_str(date_str, precision='microsecond', format=format).date()
3223
3224
3225def datetime_add_months(dt, months):
3226 """Increment/Decrement a datetime object by months."""
3227 month = dt.month + months - 1
3228 year = dt.year + month // 12
3229 month = month % 12 + 1
3230 day = min(dt.day, calendar.monthrange(year, month)[1])
3231 return dt.replace(year, month, day)
3232
3233
3234def datetime_round(dt, precision='day'):
3235 """
3236 Round a datetime object's time to a specific precision
3237 """
3238 if precision == 'microsecond':
3239 return dt
3240
3241 unit_seconds = {
3242 'day': 86400,
3243 'hour': 3600,
3244 'minute': 60,
3245 'second': 1,
3246 }
3247 roundto = lambda x, n: ((x + n / 2) // n) * n
3248 timestamp = calendar.timegm(dt.timetuple())
3249 return datetime.datetime.utcfromtimestamp(roundto(timestamp, unit_seconds[precision]))
5f6a1245
JW
3250
3251
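# Illustrative sketch (not in the original module): relative dates plus the month
# arithmetic and rounding helpers above. The first equality could in principle straddle
# midnight between the two calls; it is meant as an illustration, not a test.
def _date_helpers_examples():
    assert date_from_str('now-1day') == date_from_str('yesterday')
    assert datetime_add_months(datetime.datetime(2021, 1, 31), 1) == datetime.datetime(2021, 2, 28)
    assert datetime_round(datetime.datetime(2021, 5, 1, 12, 30), 'day') == datetime.datetime(2021, 5, 2)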
e63fc1be 3252def hyphenate_date(date_str):
3253 """
3254 Convert a date in 'YYYYMMDD' format to 'YYYY-MM-DD' format"""
3255 match = re.match(r'^(\d\d\d\d)(\d\d)(\d\d)$', date_str)
3256 if match is not None:
3257 return '-'.join(match.groups())
3258 else:
3259 return date_str
3260
5f6a1245 3261
bd558525
JMF
3262class DateRange(object):
3263 """Represents a time interval between two dates"""
5f6a1245 3264
bd558525
JMF
3265 def __init__(self, start=None, end=None):
3266 """start and end must be strings in the format accepted by date"""
3267 if start is not None:
3268 self.start = date_from_str(start)
3269 else:
3270 self.start = datetime.datetime.min.date()
3271 if end is not None:
3272 self.end = date_from_str(end)
3273 else:
3274 self.end = datetime.datetime.max.date()
37254abc 3275 if self.start > self.end:
bd558525 3276 raise ValueError('Date range: "%s" , the start date must be before the end date' % self)
5f6a1245 3277
bd558525
JMF
3278 @classmethod
3279 def day(cls, day):
3280 """Returns a range that only contains the given day"""
5f6a1245
JW
3281 return cls(day, day)
3282
bd558525
JMF
3283 def __contains__(self, date):
3284 """Check if the date is in the range"""
37254abc
JMF
3285 if not isinstance(date, datetime.date):
3286 date = date_from_str(date)
3287 return self.start <= date <= self.end
5f6a1245 3288
bd558525 3289 def __str__(self):
5f6a1245 3290 return '%s - %s' % (self.start.isoformat(), self.end.isoformat())
c496ca96
PH
3291
3292
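# Illustrative sketch (not in the original module): membership uses the same
# date_from_str parsing as above, so plain YYYYMMDD strings work directly.
def _daterange_example():
    rng = DateRange('20200101', '20200630')
    assert '20200415' in rng
    assert '20200701' not in rng
    assert '20200102' in DateRange.day('20200102')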
3293def platform_name():
3294 """ Returns the platform name as a compat_str """
3295 res = platform.platform()
3296 if isinstance(res, bytes):
3297 res = res.decode(preferredencoding())
3298
3299 assert isinstance(res, compat_str)
3300 return res
c257baff
PH
3301
3302
49fa4d9a
N
3303def get_windows_version():
3304 ''' Get Windows version. None if it's not running on Windows '''
3305 if compat_os_name == 'nt':
3306 return version_tuple(platform.win32_ver()[1])
3307 else:
3308 return None
3309
3310
b58ddb32
PH
3311def _windows_write_string(s, out):
3312 """ Returns True if the string was written using special methods,
3313 False if it has yet to be written out."""
3314 # Adapted from http://stackoverflow.com/a/3259271/35070
3315
3316 import ctypes
3317 import ctypes.wintypes
3318
3319 WIN_OUTPUT_IDS = {
3320 1: -11,
3321 2: -12,
3322 }
3323
a383a98a
PH
3324 try:
3325 fileno = out.fileno()
3326 except AttributeError:
3327 # If the output stream doesn't have a fileno, it's virtual
3328 return False
aa42e873
PH
3329 except io.UnsupportedOperation:
3330 # Some strange Windows pseudo files?
3331 return False
b58ddb32
PH
3332 if fileno not in WIN_OUTPUT_IDS:
3333 return False
3334
d7cd9a9e 3335 GetStdHandle = compat_ctypes_WINFUNCTYPE(
b58ddb32 3336 ctypes.wintypes.HANDLE, ctypes.wintypes.DWORD)(
d7cd9a9e 3337 ('GetStdHandle', ctypes.windll.kernel32))
b58ddb32
PH
3338 h = GetStdHandle(WIN_OUTPUT_IDS[fileno])
3339
d7cd9a9e 3340 WriteConsoleW = compat_ctypes_WINFUNCTYPE(
b58ddb32
PH
3341 ctypes.wintypes.BOOL, ctypes.wintypes.HANDLE, ctypes.wintypes.LPWSTR,
3342 ctypes.wintypes.DWORD, ctypes.POINTER(ctypes.wintypes.DWORD),
d7cd9a9e 3343 ctypes.wintypes.LPVOID)(('WriteConsoleW', ctypes.windll.kernel32))
b58ddb32
PH
3344 written = ctypes.wintypes.DWORD(0)
3345
d7cd9a9e 3346 GetFileType = compat_ctypes_WINFUNCTYPE(ctypes.wintypes.DWORD, ctypes.wintypes.DWORD)(('GetFileType', ctypes.windll.kernel32))
b58ddb32
PH
3347 FILE_TYPE_CHAR = 0x0002
3348 FILE_TYPE_REMOTE = 0x8000
d7cd9a9e 3349 GetConsoleMode = compat_ctypes_WINFUNCTYPE(
b58ddb32
PH
3350 ctypes.wintypes.BOOL, ctypes.wintypes.HANDLE,
3351 ctypes.POINTER(ctypes.wintypes.DWORD))(
d7cd9a9e 3352 ('GetConsoleMode', ctypes.windll.kernel32))
b58ddb32
PH
3353 INVALID_HANDLE_VALUE = ctypes.wintypes.DWORD(-1).value
3354
3355 def not_a_console(handle):
3356 if handle == INVALID_HANDLE_VALUE or handle is None:
3357 return True
3089bc74
S
3358 return ((GetFileType(handle) & ~FILE_TYPE_REMOTE) != FILE_TYPE_CHAR
3359 or GetConsoleMode(handle, ctypes.byref(ctypes.wintypes.DWORD())) == 0)
b58ddb32
PH
3360
3361 if not_a_console(h):
3362 return False
3363
d1b9c912
PH
3364 def next_nonbmp_pos(s):
3365 try:
3366 return next(i for i, c in enumerate(s) if ord(c) > 0xffff)
3367 except StopIteration:
3368 return len(s)
3369
3370 while s:
3371 count = min(next_nonbmp_pos(s), 1024)
3372
b58ddb32 3373 ret = WriteConsoleW(
d1b9c912 3374 h, s, count if count else 2, ctypes.byref(written), None)
b58ddb32
PH
3375 if ret == 0:
3376 raise OSError('Failed to write string')
d1b9c912
PH
3377 if not count: # We just wrote a non-BMP character
3378 assert written.value == 2
3379 s = s[1:]
3380 else:
3381 assert written.value > 0
3382 s = s[written.value:]
b58ddb32
PH
3383 return True
3384
3385
734f90bb 3386def write_string(s, out=None, encoding=None):
7459e3a2
PH
3387 if out is None:
3388 out = sys.stderr
8bf48f23 3389 assert type(s) == compat_str
7459e3a2 3390
b58ddb32
PH
3391 if sys.platform == 'win32' and encoding is None and hasattr(out, 'fileno'):
3392 if _windows_write_string(s, out):
3393 return
3394
3089bc74
S
3395 if ('b' in getattr(out, 'mode', '')
3396 or sys.version_info[0] < 3): # Python 2 lies about mode of sys.stderr
104aa738
PH
3397 byt = s.encode(encoding or preferredencoding(), 'ignore')
3398 out.write(byt)
3399 elif hasattr(out, 'buffer'):
3400 enc = encoding or getattr(out, 'encoding', None) or preferredencoding()
3401 byt = s.encode(enc, 'ignore')
3402 out.buffer.write(byt)
3403 else:
8bf48f23 3404 out.write(s)
7459e3a2
PH
3405 out.flush()
3406
3407
48ea9cea
PH
3408def bytes_to_intlist(bs):
3409 if not bs:
3410 return []
3411 if isinstance(bs[0], int): # Python 3
3412 return list(bs)
3413 else:
3414 return [ord(c) for c in bs]
3415
c257baff 3416
cba892fa 3417def intlist_to_bytes(xs):
3418 if not xs:
3419 return b''
edaa23f8 3420 return compat_struct_pack('%dB' % len(xs), *xs)
c38b1e77
PH
3421
3422
c1c9a79c
PH
3423# Cross-platform file locking
3424if sys.platform == 'win32':
3425 import ctypes.wintypes
3426 import msvcrt
3427
3428 class OVERLAPPED(ctypes.Structure):
3429 _fields_ = [
3430 ('Internal', ctypes.wintypes.LPVOID),
3431 ('InternalHigh', ctypes.wintypes.LPVOID),
3432 ('Offset', ctypes.wintypes.DWORD),
3433 ('OffsetHigh', ctypes.wintypes.DWORD),
3434 ('hEvent', ctypes.wintypes.HANDLE),
3435 ]
3436
3437 kernel32 = ctypes.windll.kernel32
3438 LockFileEx = kernel32.LockFileEx
3439 LockFileEx.argtypes = [
3440 ctypes.wintypes.HANDLE, # hFile
3441 ctypes.wintypes.DWORD, # dwFlags
3442 ctypes.wintypes.DWORD, # dwReserved
3443 ctypes.wintypes.DWORD, # nNumberOfBytesToLockLow
3444 ctypes.wintypes.DWORD, # nNumberOfBytesToLockHigh
3445 ctypes.POINTER(OVERLAPPED) # Overlapped
3446 ]
3447 LockFileEx.restype = ctypes.wintypes.BOOL
3448 UnlockFileEx = kernel32.UnlockFileEx
3449 UnlockFileEx.argtypes = [
3450 ctypes.wintypes.HANDLE, # hFile
3451 ctypes.wintypes.DWORD, # dwReserved
3452 ctypes.wintypes.DWORD, # nNumberOfBytesToLockLow
3453 ctypes.wintypes.DWORD, # nNumberOfBytesToLockHigh
3454 ctypes.POINTER(OVERLAPPED) # Overlapped
3455 ]
3456 UnlockFileEx.restype = ctypes.wintypes.BOOL
3457 whole_low = 0xffffffff
3458 whole_high = 0x7fffffff
3459
3460 def _lock_file(f, exclusive):
3461 overlapped = OVERLAPPED()
3462 overlapped.Offset = 0
3463 overlapped.OffsetHigh = 0
3464 overlapped.hEvent = 0
3465 f._lock_file_overlapped_p = ctypes.pointer(overlapped)
3466 handle = msvcrt.get_osfhandle(f.fileno())
3467 if not LockFileEx(handle, 0x2 if exclusive else 0x0, 0,
3468 whole_low, whole_high, f._lock_file_overlapped_p):
3469 raise OSError('Locking file failed: %r' % ctypes.FormatError())
3470
3471 def _unlock_file(f):
3472 assert f._lock_file_overlapped_p
3473 handle = msvcrt.get_osfhandle(f.fileno())
3474 if not UnlockFileEx(handle, 0,
3475 whole_low, whole_high, f._lock_file_overlapped_p):
3476 raise OSError('Unlocking file failed: %r' % ctypes.FormatError())
3477
3478else:
399a76e6
YCH
3479 # Some platforms, such as Jython, are missing fcntl
3480 try:
3481 import fcntl
c1c9a79c 3482
399a76e6
YCH
3483 def _lock_file(f, exclusive):
3484 fcntl.flock(f, fcntl.LOCK_EX if exclusive else fcntl.LOCK_SH)
c1c9a79c 3485
399a76e6
YCH
3486 def _unlock_file(f):
3487 fcntl.flock(f, fcntl.LOCK_UN)
3488 except ImportError:
3489 UNSUPPORTED_MSG = 'file locking is not supported on this platform'
3490
3491 def _lock_file(f, exclusive):
3492 raise IOError(UNSUPPORTED_MSG)
3493
3494 def _unlock_file(f):
3495 raise IOError(UNSUPPORTED_MSG)
c1c9a79c
PH
3496
3497
3498class locked_file(object):
3499 def __init__(self, filename, mode, encoding=None):
3500 assert mode in ['r', 'a', 'w']
3501 self.f = io.open(filename, mode, encoding=encoding)
3502 self.mode = mode
3503
3504 def __enter__(self):
3505 exclusive = self.mode != 'r'
3506 try:
3507 _lock_file(self.f, exclusive)
3508 except IOError:
3509 self.f.close()
3510 raise
3511 return self
3512
3513 def __exit__(self, etype, value, traceback):
3514 try:
3515 _unlock_file(self.f)
3516 finally:
3517 self.f.close()
3518
3519 def __iter__(self):
3520 return iter(self.f)
3521
3522 def write(self, *args):
3523 return self.f.write(*args)
3524
3525 def read(self, *args):
3526 return self.f.read(*args)
4eb7f1d1
JMF
3527
3528
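# Illustrative sketch (not in the original module): `path` is a caller-supplied file
# name. Writing takes an exclusive lock and reading a shared one, where the platform
# supports locking at all; otherwise _lock_file raises IOError.
def _locked_file_example(path):
    with locked_file(path, 'w', encoding='utf-8') as f:
        f.write('hello\n')
    with locked_file(path, 'r', encoding='utf-8') as f:
        return f.read()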
4644ac55
S
3529def get_filesystem_encoding():
3530 encoding = sys.getfilesystemencoding()
3531 return encoding if encoding is not None else 'utf-8'
3532
3533
4eb7f1d1 3534def shell_quote(args):
a6a173c2 3535 quoted_args = []
4644ac55 3536 encoding = get_filesystem_encoding()
a6a173c2
JMF
3537 for a in args:
3538 if isinstance(a, bytes):
3539 # We may get a filename encoded with 'encodeFilename'
3540 a = a.decode(encoding)
aefce8e6 3541 quoted_args.append(compat_shlex_quote(a))
28e614de 3542 return ' '.join(quoted_args)
9d4660ca
PH
3543
3544
3545def smuggle_url(url, data):
3546 """ Pass additional data in a URL for internal use. """
3547
81953d1a
RA
3548 url, idata = unsmuggle_url(url, {})
3549 data.update(idata)
15707c7e 3550 sdata = compat_urllib_parse_urlencode(
28e614de
PH
3551 {'__youtubedl_smuggle': json.dumps(data)})
3552 return url + '#' + sdata
9d4660ca
PH
3553
3554
79f82953 3555def unsmuggle_url(smug_url, default=None):
83e865a3 3556 if '#__youtubedl_smuggle' not in smug_url:
79f82953 3557 return smug_url, default
28e614de
PH
3558 url, _, sdata = smug_url.rpartition('#')
3559 jsond = compat_parse_qs(sdata)['__youtubedl_smuggle'][0]
9d4660ca
PH
3560 data = json.loads(jsond)
3561 return url, data
02dbf93f
PH
3562
3563
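# Illustrative sketch (not in the original module): data is carried in the URL fragment
# and recovered untouched; the URL below is hypothetical.
def _smuggle_url_example():
    url = smuggle_url('https://example.com/video', {'referer': 'https://example.com/'})
    plain_url, data = unsmuggle_url(url)
    assert plain_url == 'https://example.com/video'
    assert data == {'referer': 'https://example.com/'}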
02dbf93f
PH
3564def format_bytes(bytes):
3565 if bytes is None:
28e614de 3566 return 'N/A'
02dbf93f
PH
3567 if type(bytes) is str:
3568 bytes = float(bytes)
3569 if bytes == 0.0:
3570 exponent = 0
3571 else:
3572 exponent = int(math.log(bytes, 1024.0))
28e614de 3573 suffix = ['B', 'KiB', 'MiB', 'GiB', 'TiB', 'PiB', 'EiB', 'ZiB', 'YiB'][exponent]
02dbf93f 3574 converted = float(bytes) / float(1024 ** exponent)
28e614de 3575 return '%.2f%s' % (converted, suffix)
f53c966a 3576
1c088fa8 3577
fb47597b
S
3578def lookup_unit_table(unit_table, s):
3579 units_re = '|'.join(re.escape(u) for u in unit_table)
3580 m = re.match(
782b1b5b 3581 r'(?P<num>[0-9]+(?:[,.][0-9]*)?)\s*(?P<unit>%s)\b' % units_re, s)
fb47597b
S
3582 if not m:
3583 return None
3584 num_str = m.group('num').replace(',', '.')
3585 mult = unit_table[m.group('unit')]
3586 return int(float(num_str) * mult)
3587
3588
be64b5b0
PH
3589def parse_filesize(s):
3590 if s is None:
3591 return None
3592
dfb1b146 3593 # The lower-case forms are of course incorrect and unofficial,
be64b5b0
PH
3594 # but we support those too
3595 _UNIT_TABLE = {
3596 'B': 1,
3597 'b': 1,
70852b47 3598 'bytes': 1,
be64b5b0
PH
3599 'KiB': 1024,
3600 'KB': 1000,
3601 'kB': 1024,
3602 'Kb': 1000,
13585d76 3603 'kb': 1000,
70852b47
YCH
3604 'kilobytes': 1000,
3605 'kibibytes': 1024,
be64b5b0
PH
3606 'MiB': 1024 ** 2,
3607 'MB': 1000 ** 2,
3608 'mB': 1024 ** 2,
3609 'Mb': 1000 ** 2,
13585d76 3610 'mb': 1000 ** 2,
70852b47
YCH
3611 'megabytes': 1000 ** 2,
3612 'mebibytes': 1024 ** 2,
be64b5b0
PH
3613 'GiB': 1024 ** 3,
3614 'GB': 1000 ** 3,
3615 'gB': 1024 ** 3,
3616 'Gb': 1000 ** 3,
13585d76 3617 'gb': 1000 ** 3,
70852b47
YCH
3618 'gigabytes': 1000 ** 3,
3619 'gibibytes': 1024 ** 3,
be64b5b0
PH
3620 'TiB': 1024 ** 4,
3621 'TB': 1000 ** 4,
3622 'tB': 1024 ** 4,
3623 'Tb': 1000 ** 4,
13585d76 3624 'tb': 1000 ** 4,
70852b47
YCH
3625 'terabytes': 1000 ** 4,
3626 'tebibytes': 1024 ** 4,
be64b5b0
PH
3627 'PiB': 1024 ** 5,
3628 'PB': 1000 ** 5,
3629 'pB': 1024 ** 5,
3630 'Pb': 1000 ** 5,
13585d76 3631 'pb': 1000 ** 5,
70852b47
YCH
3632 'petabytes': 1000 ** 5,
3633 'pebibytes': 1024 ** 5,
be64b5b0
PH
3634 'EiB': 1024 ** 6,
3635 'EB': 1000 ** 6,
3636 'eB': 1024 ** 6,
3637 'Eb': 1000 ** 6,
13585d76 3638 'eb': 1000 ** 6,
70852b47
YCH
3639 'exabytes': 1000 ** 6,
3640 'exbibytes': 1024 ** 6,
be64b5b0
PH
3641 'ZiB': 1024 ** 7,
3642 'ZB': 1000 ** 7,
3643 'zB': 1024 ** 7,
3644 'Zb': 1000 ** 7,
13585d76 3645 'zb': 1000 ** 7,
70852b47
YCH
3646 'zettabytes': 1000 ** 7,
3647 'zebibytes': 1024 ** 7,
be64b5b0
PH
3648 'YiB': 1024 ** 8,
3649 'YB': 1000 ** 8,
3650 'yB': 1024 ** 8,
3651 'Yb': 1000 ** 8,
13585d76 3652 'yb': 1000 ** 8,
70852b47
YCH
3653 'yottabytes': 1000 ** 8,
3654 'yobibytes': 1024 ** 8,
be64b5b0
PH
3655 }
3656
fb47597b
S
3657 return lookup_unit_table(_UNIT_TABLE, s)
3658
3659
3660def parse_count(s):
3661 if s is None:
be64b5b0
PH
3662 return None
3663
fb47597b
S
3664 s = s.strip()
3665
3666 if re.match(r'^[\d,.]+$', s):
3667 return str_to_int(s)
3668
3669 _UNIT_TABLE = {
3670 'k': 1000,
3671 'K': 1000,
3672 'm': 1000 ** 2,
3673 'M': 1000 ** 2,
3674 'kk': 1000 ** 2,
3675 'KK': 1000 ** 2,
3676 }
be64b5b0 3677
fb47597b 3678 return lookup_unit_table(_UNIT_TABLE, s)
be64b5b0 3679
2f7ae819 3680
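# Illustrative sketch (not in the original module): decimal vs. binary units for file
# sizes, and suffixed view counts; the expected values follow the tables above.
def _parse_size_count_examples():
    assert parse_filesize('5 MB') == 5 * 1000 ** 2
    assert parse_filesize('5 MiB') == 5 * 1024 ** 2
    assert parse_filesize('1,24 KB') == 1240  # comma used as decimal separator
    assert parse_count('1.1M') == 1100000
    assert parse_count('12,345') == 12345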
b871d7e9
S
3681def parse_resolution(s):
3682 if s is None:
3683 return {}
3684
3685 mobj = re.search(r'\b(?P<w>\d+)\s*[xX×]\s*(?P<h>\d+)\b', s)
3686 if mobj:
3687 return {
3688 'width': int(mobj.group('w')),
3689 'height': int(mobj.group('h')),
3690 }
3691
3692 mobj = re.search(r'\b(\d+)[pPiI]\b', s)
3693 if mobj:
3694 return {'height': int(mobj.group(1))}
3695
3696 mobj = re.search(r'\b([48])[kK]\b', s)
3697 if mobj:
3698 return {'height': int(mobj.group(1)) * 540}
3699
3700 return {}
3701
3702
0dc41787
S
3703def parse_bitrate(s):
3704 if not isinstance(s, compat_str):
3705 return
3706 mobj = re.search(r'\b(\d+)\s*kbps', s)
3707 if mobj:
3708 return int(mobj.group(1))
3709
3710
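# Illustrative sketch (not in the original module): the three resolution patterns
# matched above, plus a bitrate string.
def _parse_resolution_bitrate_examples():
    assert parse_resolution('1920x1080') == {'width': 1920, 'height': 1080}
    assert parse_resolution('720p') == {'height': 720}
    assert parse_resolution('4k') == {'height': 2160}
    assert parse_bitrate('128 kbps') == 128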
a942d6cb 3711def month_by_name(name, lang='en'):
caefb1de
PH
3712 """ Return the number of a month by (locale-independently) English name """
3713
f6717dec 3714 month_names = MONTH_NAMES.get(lang, MONTH_NAMES['en'])
a942d6cb 3715
caefb1de 3716 try:
f6717dec 3717 return month_names.index(name) + 1
7105440c
YCH
3718 except ValueError:
3719 return None
3720
3721
3722def month_by_abbreviation(abbrev):
3723 """ Return the number of a month by (locale-independently) English
3724 abbreviations """
3725
3726 try:
3727 return [s[:3] for s in ENGLISH_MONTH_NAMES].index(abbrev) + 1
caefb1de
PH
3728 except ValueError:
3729 return None
18258362
JMF
3730
3731
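# Illustrative sketch (not in the original module): both helpers are locale-independent;
# MONTH_NAMES and ENGLISH_MONTH_NAMES are defined elsewhere in this module.
def _month_lookup_examples():
    assert month_by_name('December') == 12
    assert month_by_name('December', lang='fr') is None  # not a French month name
    assert month_by_abbreviation('Mar') == 3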
5aafe895 3732def fix_xml_ampersands(xml_str):
18258362 3733 """Replace all the '&' by '&amp;' in XML"""
5aafe895
PH
3734 return re.sub(
3735 r'&(?!amp;|lt;|gt;|apos;|quot;|#x[0-9a-fA-F]{,4};|#[0-9]{,4};)',
28e614de 3736 '&amp;',
5aafe895 3737 xml_str)
e3946f98
PH
3738
3739
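# Illustrative sketch (not in the original module): bare ampersands are escaped, while
# entities covered by the lookahead above are left alone.
def _fix_xml_ampersands_example():
    assert fix_xml_ampersands('Tom & Jerry') == 'Tom &amp; Jerry'
    assert fix_xml_ampersands('&amp; &#38; &quot;') == '&amp; &#38; &quot;'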
3740def setproctitle(title):
8bf48f23 3741 assert isinstance(title, compat_str)
c1c05c67
YCH
3742
3743 # ctypes in Jython is not complete
3744 # http://bugs.jython.org/issue2148
3745 if sys.platform.startswith('java'):
3746 return
3747
e3946f98 3748 try:
611c1dd9 3749 libc = ctypes.cdll.LoadLibrary('libc.so.6')
e3946f98
PH
3750 except OSError:
3751 return
2f49bcd6
RC
3752 except TypeError:
3753 # LoadLibrary in Windows Python 2.7.13 only expects
3754 # a bytestring, but since unicode_literals turns
3755 # every string into a unicode string, it fails.
3756 return
6eefe533
PH
3757 title_bytes = title.encode('utf-8')
3758 buf = ctypes.create_string_buffer(len(title_bytes))
3759 buf.value = title_bytes
e3946f98 3760 try:
6eefe533 3761 libc.prctl(15, buf, 0, 0, 0)
e3946f98
PH
3762 except AttributeError:
3763 return # Strange libc, just skip this
d7dda168
PH
3764
3765
3766def remove_start(s, start):
46bc9b7d 3767 return s[len(start):] if s is not None and s.startswith(start) else s
29eb5174
PH
3768
3769
2b9faf55 3770def remove_end(s, end):
46bc9b7d 3771 return s[:-len(end)] if s is not None and s.endswith(end) else s
2b9faf55
PH
3772
3773
31b2051e
S
3774def remove_quotes(s):
3775 if s is None or len(s) < 2:
3776 return s
3777 for quote in ('"', "'", ):
3778 if s[0] == quote and s[-1] == quote:
3779 return s[1:-1]
3780 return s
3781
3782
b6e0c7d2
U
3783def get_domain(url):
3784 domain = re.match(r'(?:https?:\/\/)?(?:www\.)?(?P<domain>[^\n\/]+\.[^\n\/]+)(?:\/(.*))?', url)
3785 return domain.group('domain') if domain else None
3786
3787
29eb5174 3788def url_basename(url):
9b8aaeed 3789 path = compat_urlparse.urlparse(url).path
28e614de 3790 return path.strip('/').split('/')[-1]
aa94a6d3
PH
3791
3792
02dc0a36
S
3793def base_url(url):
3794 return re.match(r'https?://[^?#&]+/', url).group()
3795
3796
e34c3361 3797def urljoin(base, path):
4b5de77b
S
3798 if isinstance(path, bytes):
3799 path = path.decode('utf-8')
e34c3361
S
3800 if not isinstance(path, compat_str) or not path:
3801 return None
fad4ceb5 3802 if re.match(r'^(?:[a-zA-Z][a-zA-Z0-9+-.]*:)?//', path):
e34c3361 3803 return path
4b5de77b
S
3804 if isinstance(base, bytes):
3805 base = base.decode('utf-8')
3806 if not isinstance(base, compat_str) or not re.match(
3807 r'^(?:https?:)?//', base):
e34c3361
S
3808 return None
3809 return compat_urlparse.urljoin(base, path)
3810
3811
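# Illustrative sketch (not in the original module): the URL helpers above; the example
# URLs are hypothetical.
def _url_helper_examples():
    assert url_basename('https://example.com/path/video.mp4?dl=1') == 'video.mp4'
    assert base_url('https://example.com/path/video.mp4?dl=1') == 'https://example.com/path/'
    assert get_domain('https://www.example.com/watch?v=abc') == 'example.com'
    assert urljoin('https://example.com/a/', 'b/c.txt') == 'https://example.com/a/b/c.txt'
    assert urljoin('https://example.com/a/', '//cdn.example.com/c.txt') == '//cdn.example.com/c.txt'
    assert urljoin('https://example.com/a/', None) is None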
aa94a6d3
PH
3812class HEADRequest(compat_urllib_request.Request):
3813 def get_method(self):
611c1dd9 3814 return 'HEAD'
7217e148
PH
3815
3816
95cf60e8
S
3817class PUTRequest(compat_urllib_request.Request):
3818 def get_method(self):
3819 return 'PUT'
3820
3821
9732d77e 3822def int_or_none(v, scale=1, default=None, get_attr=None, invscale=1):
28746fbd
PH
3823 if get_attr:
3824 if v is not None:
3825 v = getattr(v, get_attr, None)
9572013d
PH
3826 if v == '':
3827 v = None
1812afb7
S
3828 if v is None:
3829 return default
3830 try:
3831 return int(v) * invscale // scale
5e1271c5 3832 except (ValueError, TypeError):
af98f8ff 3833 return default
9732d77e 3834
9572013d 3835
40a90862
JMF
3836def str_or_none(v, default=None):
3837 return default if v is None else compat_str(v)
3838
9732d77e
PH
3839
3840def str_to_int(int_str):
48d4681e 3841 """ A more relaxed version of int_or_none """
42db58ec 3842 if isinstance(int_str, compat_integer_types):
348c6bf1 3843 return int_str
42db58ec
S
3844 elif isinstance(int_str, compat_str):
3845 int_str = re.sub(r'[,\.\+]', '', int_str)
3846 return int_or_none(int_str)
608d11f5
PH
3847
3848
9732d77e 3849def float_or_none(v, scale=1, invscale=1, default=None):
caf80631
S
3850 if v is None:
3851 return default
3852 try:
3853 return float(v) * invscale / scale
5e1271c5 3854 except (ValueError, TypeError):
caf80631 3855 return default
43f775e4
PH
3856
3857
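# Illustrative sketch (not in the original module): the tolerant numeric coercers above
# never raise; they fall back to the given default instead.
def _numeric_coercion_examples():
    assert int_or_none('42') == 42
    assert int_or_none('', default=0) == 0
    assert int_or_none('abc') is None
    assert str_to_int('123,456') == 123456
    assert float_or_none('0.5') == 0.5
    assert float_or_none('n/a', default=0.0) == 0.0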
c7e327c4
S
3858def bool_or_none(v, default=None):
3859 return v if isinstance(v, bool) else default
3860
3861
53cd37ba
S
3862def strip_or_none(v, default=None):
3863 return v.strip() if isinstance(v, compat_str) else default
b72b4431
S
3864
3865
af03000a
S
3866def url_or_none(url):
3867 if not url or not isinstance(url, compat_str):
3868 return None
3869 url = url.strip()
29f7c58a 3870 return url if re.match(r'^(?:(?:https?|rt(?:m(?:pt?[es]?|fp)|sp[su]?)|mms|ftps?):)?//', url) else None
af03000a
S
3871
3872
e29663c6 3873def strftime_or_none(timestamp, date_format, default=None):
3874 datetime_object = None
3875 try:
3876 if isinstance(timestamp, compat_numeric_types): # unix timestamp
3877 datetime_object = datetime.datetime.utcfromtimestamp(timestamp)
3878 elif isinstance(timestamp, compat_str): # assume YYYYMMDD
3879 datetime_object = datetime.datetime.strptime(timestamp, '%Y%m%d')
3880 return datetime_object.strftime(date_format)
3881 except (ValueError, TypeError, AttributeError):
3882 return default
3883
3884
608d11f5 3885def parse_duration(s):
8f9312c3 3886 if not isinstance(s, compat_basestring):
608d11f5
PH
3887 return None
3888
ca7b3246
S
3889 s = s.strip()
3890
acaff495 3891 days, hours, mins, secs, ms = [None] * 5
15846398 3892 m = re.match(r'(?:(?:(?:(?P<days>[0-9]+):)?(?P<hours>[0-9]+):)?(?P<mins>[0-9]+):)?(?P<secs>[0-9]+)(?P<ms>\.[0-9]+)?Z?$', s)
acaff495 3893 if m:
3894 days, hours, mins, secs, ms = m.groups()
3895 else:
3896 m = re.match(
056653bb
S
3897 r'''(?ix)(?:P?
3898 (?:
3899 [0-9]+\s*y(?:ears?)?\s*
3900 )?
3901 (?:
3902 [0-9]+\s*m(?:onths?)?\s*
3903 )?
3904 (?:
3905 [0-9]+\s*w(?:eeks?)?\s*
3906 )?
8f4b58d7 3907 (?:
acaff495 3908 (?P<days>[0-9]+)\s*d(?:ays?)?\s*
8f4b58d7 3909 )?
056653bb 3910 T)?
acaff495 3911 (?:
3912 (?P<hours>[0-9]+)\s*h(?:ours?)?\s*
3913 )?
3914 (?:
3915 (?P<mins>[0-9]+)\s*m(?:in(?:ute)?s?)?\s*
3916 )?
3917 (?:
3918 (?P<secs>[0-9]+)(?P<ms>\.[0-9]+)?\s*s(?:ec(?:ond)?s?)?\s*
15846398 3919 )?Z?$''', s)
acaff495 3920 if m:
3921 days, hours, mins, secs, ms = m.groups()
3922 else:
15846398 3923 m = re.match(r'(?i)(?:(?P<hours>[0-9.]+)\s*(?:hours?)|(?P<mins>[0-9.]+)\s*(?:mins?\.?|minutes?)\s*)Z?$', s)
acaff495 3924 if m:
3925 hours, mins = m.groups()
3926 else:
3927 return None
3928
3929 duration = 0
3930 if secs:
3931 duration += float(secs)
3932 if mins:
3933 duration += float(mins) * 60
3934 if hours:
3935 duration += float(hours) * 60 * 60
3936 if days:
3937 duration += float(days) * 24 * 60 * 60
3938 if ms:
3939 duration += float(ms)
3940 return duration
91d7d0b3
JMF
3941
3942
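# Illustrative usage sketch (not part of the original module). parse_duration()
# above returns seconds as a float; a few representative inputs:
#   parse_duration('1:30')         == 90.0     # [[hh:]mm:]ss
#   parse_duration('01:02:03.5')   == 3723.5
#   parse_duration('PT1H30M')      == 5400.0   # ISO 8601 style
#   parse_duration('3 min 15 s')   == 195.0    # free-form units
#   parse_duration('soon') is None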
e65e4c88 3943def prepend_extension(filename, ext, expected_real_ext=None):
5f6a1245 3944 name, real_ext = os.path.splitext(filename)
e65e4c88
S
3945 return (
3946 '{0}.{1}{2}'.format(name, ext, real_ext)
3947 if not expected_real_ext or real_ext[1:] == expected_real_ext
3948 else '{0}.{1}'.format(filename, ext))
d70ad093
PH
3949
3950
b3ed15b7
S
3951def replace_extension(filename, ext, expected_real_ext=None):
3952 name, real_ext = os.path.splitext(filename)
3953 return '{0}.{1}'.format(
3954 name if not expected_real_ext or real_ext[1:] == expected_real_ext else filename,
3955 ext)
3956
3957
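# Illustrative usage sketch (not part of the original module); expected results
# follow from prepend_extension() and replace_extension() above:
#   prepend_extension('video.mp4', 'temp')                           == 'video.temp.mp4'
#   prepend_extension('video.mp4', 'temp', expected_real_ext='mkv')  == 'video.mp4.temp'
#   replace_extension('video.webm', 'mkv')                           == 'video.mkv'
#   replace_extension('video.webm', 'mkv', expected_real_ext='mp4')  == 'video.webm.mkv'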
d70ad093
PH
3958def check_executable(exe, args=[]):
3959 """ Checks if the given binary is installed somewhere in PATH, and returns its name.
3960 args can be a list of arguments for a short output (like -version) """
3961 try:
f5b1bca9 3962 process_communicate_or_kill(subprocess.Popen(
3963 [exe] + args, stdout=subprocess.PIPE, stderr=subprocess.PIPE))
d70ad093
PH
3964 except OSError:
3965 return False
3966 return exe
b7ab0590
PH
3967
3968
95807118 3969def get_exe_version(exe, args=['--version'],
cae97f65 3970 version_re=None, unrecognized='present'):
95807118
PH
3971 """ Returns the version of the specified executable,
3972 or False if the executable is not present """
3973 try:
b64d04c1 3974 # STDIN should be redirected too. On UNIX-like systems, ffmpeg triggers
7a5c1cfe 3975 # SIGTTOU if yt-dlp is run in the background.
067aa17e 3976 # See https://github.com/ytdl-org/youtube-dl/issues/955#issuecomment-209789656
f5b1bca9 3977 out, _ = process_communicate_or_kill(subprocess.Popen(
54116803 3978 [encodeArgument(exe)] + args,
00ca7552 3979 stdin=subprocess.PIPE,
f5b1bca9 3980 stdout=subprocess.PIPE, stderr=subprocess.STDOUT))
95807118
PH
3981 except OSError:
3982 return False
cae97f65
PH
3983 if isinstance(out, bytes): # Python 2.x
3984 out = out.decode('ascii', 'ignore')
3985 return detect_exe_version(out, version_re, unrecognized)
3986
3987
3988def detect_exe_version(output, version_re=None, unrecognized='present'):
3989 assert isinstance(output, compat_str)
3990 if version_re is None:
3991 version_re = r'version\s+([-0-9._a-zA-Z]+)'
3992 m = re.search(version_re, output)
95807118
PH
3993 if m:
3994 return m.group(1)
3995 else:
3996 return unrecognized
3997
3998
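# Illustrative usage sketch (not part of the original module; the sample outputs are
# made-up strings). The default regex in detect_exe_version() looks for 'version <token>':
#   detect_exe_version('ffmpeg version 4.4.1 Copyright (c) 2000-2021')      == '4.4.1'
#   detect_exe_version('mkvmerge v58.0.0', version_re=r'v(\d+\.\d+\.\d+)')  == '58.0.0'
#   detect_exe_version('no recognisable output')                            == 'present'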
cb89cfc1 3999class LazyList(collections.abc.Sequence):
483336e7 4000 ''' Lazy immutable list from an iterable
4001 Note that slices of a LazyList are lists and not LazyList'''
4002
8e5fecc8 4003 class IndexError(IndexError):
4004 pass
4005
483336e7 4006 def __init__(self, iterable):
4007 self.__iterable = iter(iterable)
4008 self.__cache = []
28419ca2 4009 self.__reversed = False
483336e7 4010
4011 def __iter__(self):
28419ca2 4012 if self.__reversed:
4013 # We need to consume the entire iterable to iterate in reverse
981052c9 4014 yield from self.exhaust()
28419ca2 4015 return
4016 yield from self.__cache
483336e7 4017 for item in self.__iterable:
4018 self.__cache.append(item)
4019 yield item
4020
981052c9 4021 def __exhaust(self):
483336e7 4022 self.__cache.extend(self.__iterable)
28419ca2 4023 return self.__cache
4024
981052c9 4025 def exhaust(self):
4026 ''' Evaluate the entire iterable '''
4027 return self.__exhaust()[::-1 if self.__reversed else 1]
4028
28419ca2 4029 @staticmethod
981052c9 4030 def __reverse_index(x):
e0f2b4b4 4031 return None if x is None else -(x + 1)
483336e7 4032
4033 def __getitem__(self, idx):
4034 if isinstance(idx, slice):
28419ca2 4035 if self.__reversed:
e0f2b4b4 4036 idx = slice(self.__reverse_index(idx.start), self.__reverse_index(idx.stop), -(idx.step or 1))
4037 start, stop, step = idx.start, idx.stop, idx.step or 1
483336e7 4038 elif isinstance(idx, int):
28419ca2 4039 if self.__reversed:
981052c9 4040 idx = self.__reverse_index(idx)
e0f2b4b4 4041 start, stop, step = idx, idx, 0
483336e7 4042 else:
4043 raise TypeError('indices must be integers or slices')
e0f2b4b4 4044 if ((start or 0) < 0 or (stop or 0) < 0
4045 or (start is None and step < 0)
4046 or (stop is None and step > 0)):
483336e7 4047 # We need to consume the entire iterable to be able to slice from the end
4048 # Obviously, never use this with infinite iterables
8e5fecc8 4049 self.__exhaust()
4050 try:
4051 return self.__cache[idx]
4052 except IndexError as e:
4053 raise self.IndexError(e) from e
e0f2b4b4 4054 n = max(start or 0, stop or 0) - len(self.__cache) + 1
28419ca2 4055 if n > 0:
4056 self.__cache.extend(itertools.islice(self.__iterable, n))
8e5fecc8 4057 try:
4058 return self.__cache[idx]
4059 except IndexError as e:
4060 raise self.IndexError(e) from e
483336e7 4061
4062 def __bool__(self):
4063 try:
28419ca2 4064 self[-1] if self.__reversed else self[0]
8e5fecc8 4065 except self.IndexError:
483336e7 4066 return False
4067 return True
4068
4069 def __len__(self):
8e5fecc8 4070 self.__exhaust()
483336e7 4071 return len(self.__cache)
4072
981052c9 4073 def reverse(self):
28419ca2 4074 self.__reversed = not self.__reversed
4075 return self
4076
4077 def __repr__(self):
4078 # repr and str should mimic a list. So we exhaust the iterable
4079 return repr(self.exhaust())
4080
4081 def __str__(self):
4082 return repr(self.exhaust())
4083
483336e7 4084
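# Illustrative usage sketch (not part of the original module); expected behaviour
# follows from the LazyList implementation above:
#   squares = LazyList(i * i for i in range(10))
#   squares[3]           == 9           # consumes the generator only up to index 3
#   squares[2:5]         == [4, 9, 16]  # slices are plain lists, not LazyLists
#   len(squares)         == 10          # len() exhausts the iterable
#   squares.reverse()[0] == 81          # reversing also requires full exhaustion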
7be9ccff 4085class PagedList:
dd26ced1
PH
4086 def __len__(self):
4087 # This is only useful for tests
4088 return len(self.getslice())
4089
7be9ccff 4090 def __init__(self, pagefunc, pagesize, use_cache=True):
4091 self._pagefunc = pagefunc
4092 self._pagesize = pagesize
4093 self._use_cache = use_cache
4094 self._cache = {}
4095
4096 def getpage(self, pagenum):
4097 page_results = self._cache.get(pagenum) or list(self._pagefunc(pagenum))
4098 if self._use_cache:
4099 self._cache[pagenum] = page_results
4100 return page_results
4101
4102 def getslice(self, start=0, end=None):
4103 return list(self._getslice(start, end))
4104
4105 def _getslice(self, start, end):
55575225 4106 raise NotImplementedError('This method must be implemented by subclasses')
4107
4108 def __getitem__(self, idx):
7be9ccff 4109 # NOTE: cache must be enabled if this is used
55575225 4110 if not isinstance(idx, int) or idx < 0:
4111 raise TypeError('indices must be non-negative integers')
4112 entries = self.getslice(idx, idx + 1)
4113 return entries[0] if entries else None
4114
9c44d242
PH
4115
4116class OnDemandPagedList(PagedList):
7be9ccff 4117 def _getslice(self, start, end):
b7ab0590
PH
4118 for pagenum in itertools.count(start // self._pagesize):
4119 firstid = pagenum * self._pagesize
4120 nextfirstid = pagenum * self._pagesize + self._pagesize
4121 if start >= nextfirstid:
4122 continue
4123
b7ab0590
PH
4124 startv = (
4125 start % self._pagesize
4126 if firstid <= start < nextfirstid
4127 else 0)
b7ab0590
PH
4128 endv = (
4129 ((end - 1) % self._pagesize) + 1
4130 if (end is not None and firstid <= end <= nextfirstid)
4131 else None)
4132
7be9ccff 4133 page_results = self.getpage(pagenum)
b7ab0590
PH
4134 if startv != 0 or endv is not None:
4135 page_results = page_results[startv:endv]
7be9ccff 4136 yield from page_results
b7ab0590
PH
4137
4138 # A little optimization - if current page is not "full", i.e. does
4139 # not contain page_size videos then we can assume that this page
4140 # is the last one - there are no more ids on further pages -
4141 # i.e. no need to query again.
4142 if len(page_results) + startv < self._pagesize:
4143 break
4144
4145 # If we got the whole page, but the next page is not interesting,
4146 # break out early as well
4147 if end == nextfirstid:
4148 break
81c2f20b
PH
4149
4150
9c44d242
PH
4151class InAdvancePagedList(PagedList):
4152 def __init__(self, pagefunc, pagecount, pagesize):
9c44d242 4153 self._pagecount = pagecount
7be9ccff 4154 PagedList.__init__(self, pagefunc, pagesize, True)
9c44d242 4155
7be9ccff 4156 def _getslice(self, start, end):
9c44d242
PH
4157 start_page = start // self._pagesize
4158 end_page = (
4159 self._pagecount if end is None else (end // self._pagesize + 1))
4160 skip_elems = start - start_page * self._pagesize
4161 only_more = None if end is None else end - start
4162 for pagenum in range(start_page, end_page):
7be9ccff 4163 page_results = self.getpage(pagenum)
9c44d242 4164 if skip_elems:
7be9ccff 4165 page_results = page_results[skip_elems:]
9c44d242
PH
4166 skip_elems = None
4167 if only_more is not None:
7be9ccff 4168 if len(page_results) < only_more:
4169 only_more -= len(page_results)
9c44d242 4170 else:
7be9ccff 4171 yield from page_results[:only_more]
9c44d242 4172 break
7be9ccff 4173 yield from page_results
9c44d242
PH
4174
4175
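# Illustrative usage sketch (not part of the original module). A hypothetical page
# function returning ten numbers per page shows how the paged lists above are consumed:
#   pages = OnDemandPagedList(lambda n: list(range(n * 10, (n + 1) * 10)), 10)
#   pages.getslice(5, 15)  == [5, 6, 7, 8, 9, 10, 11, 12, 13, 14]
#   pages[42]              == 42   # fetches only the page containing that index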
81c2f20b 4176def uppercase_escape(s):
676eb3f2 4177 unicode_escape = codecs.getdecoder('unicode_escape')
81c2f20b 4178 return re.sub(
a612753d 4179 r'\\U[0-9a-fA-F]{8}',
676eb3f2
PH
4180 lambda m: unicode_escape(m.group(0))[0],
4181 s)
0fe2ff78
YCH
4182
4183
4184def lowercase_escape(s):
4185 unicode_escape = codecs.getdecoder('unicode_escape')
4186 return re.sub(
4187 r'\\u[0-9a-fA-F]{4}',
4188 lambda m: unicode_escape(m.group(0))[0],
4189 s)
b53466e1 4190
d05cfe06
S
4191
4192def escape_rfc3986(s):
4193 """Escape non-ASCII characters as suggested by RFC 3986"""
8f9312c3 4194 if sys.version_info < (3, 0) and isinstance(s, compat_str):
d05cfe06 4195 s = s.encode('utf-8')
ecc0c5ee 4196 return compat_urllib_parse.quote(s, b"%/;:@&=+$,!~*'()?#[]")
d05cfe06
S
4197
4198
4199def escape_url(url):
4200 """Escape URL as suggested by RFC 3986"""
4201 url_parsed = compat_urllib_parse_urlparse(url)
4202 return url_parsed._replace(
efbed08d 4203 netloc=url_parsed.netloc.encode('idna').decode('ascii'),
d05cfe06
S
4204 path=escape_rfc3986(url_parsed.path),
4205 params=escape_rfc3986(url_parsed.params),
4206 query=escape_rfc3986(url_parsed.query),
4207 fragment=escape_rfc3986(url_parsed.fragment)
4208 ).geturl()
4209
62e609ab 4210
4dfbf869 4211def parse_qs(url):
4212 return compat_parse_qs(compat_urllib_parse_urlparse(url).query)
4213
4214
62e609ab
PH
4215def read_batch_urls(batch_fd):
4216 def fixup(url):
4217 if not isinstance(url, compat_str):
4218 url = url.decode('utf-8', 'replace')
8c04f0be 4219 BOM_UTF8 = ('\xef\xbb\xbf', '\ufeff')
4220 for bom in BOM_UTF8:
4221 if url.startswith(bom):
4222 url = url[len(bom):]
4223 url = url.lstrip()
4224 if not url or url.startswith(('#', ';', ']')):
62e609ab 4225 return False
8c04f0be 4226 # "#" cannot be stripped out since it is part of the URI
4227 # However, it can be safely stripped out if it follows a whitespace
4228 return re.split(r'\s#', url, 1)[0].rstrip()
62e609ab
PH
4229
4230 with contextlib.closing(batch_fd) as fd:
4231 return [url for url in map(fixup, fd) if url]
b74fa8cd
JMF
4232
4233
4234def urlencode_postdata(*args, **kargs):
15707c7e 4235 return compat_urllib_parse_urlencode(*args, **kargs).encode('ascii')
bcf89ce6
PH
4236
4237
38f9ef31 4238def update_url_query(url, query):
cacd9966
YCH
4239 if not query:
4240 return url
38f9ef31 4241 parsed_url = compat_urlparse.urlparse(url)
4242 qs = compat_parse_qs(parsed_url.query)
4243 qs.update(query)
4244 return compat_urlparse.urlunparse(parsed_url._replace(
15707c7e 4245 query=compat_urllib_parse_urlencode(qs, True)))
16392824 4246
8e60dc75 4247
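# Illustrative usage sketch (not part of the original module). update_url_query()
# above re-encodes the query string, keeping existing parameters and appending new
# ones (on Python 3.7+ the order follows dict insertion order):
#   update_url_query('https://example.com/path?a=1', {'b': '2'})
#       == 'https://example.com/path?a=1&b=2'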
ed0291d1
S
4248def update_Request(req, url=None, data=None, headers={}, query={}):
4249 req_headers = req.headers.copy()
4250 req_headers.update(headers)
4251 req_data = data or req.data
4252 req_url = update_url_query(url or req.get_full_url(), query)
95cf60e8
S
4253 req_get_method = req.get_method()
4254 if req_get_method == 'HEAD':
4255 req_type = HEADRequest
4256 elif req_get_method == 'PUT':
4257 req_type = PUTRequest
4258 else:
4259 req_type = compat_urllib_request.Request
ed0291d1
S
4260 new_req = req_type(
4261 req_url, data=req_data, headers=req_headers,
4262 origin_req_host=req.origin_req_host, unverifiable=req.unverifiable)
4263 if hasattr(req, 'timeout'):
4264 new_req.timeout = req.timeout
4265 return new_req
4266
4267
10c87c15 4268def _multipart_encode_impl(data, boundary):
0c265486
YCH
4269 content_type = 'multipart/form-data; boundary=%s' % boundary
4270
4271 out = b''
4272 for k, v in data.items():
4273 out += b'--' + boundary.encode('ascii') + b'\r\n'
4274 if isinstance(k, compat_str):
4275 k = k.encode('utf-8')
4276 if isinstance(v, compat_str):
4277 v = v.encode('utf-8')
4278 # RFC 2047 requires non-ASCII field names to be encoded, while RFC 7578
4279 # suggests sending UTF-8 directly. Firefox sends UTF-8, too
b2ad479d 4280 content = b'Content-Disposition: form-data; name="' + k + b'"\r\n\r\n' + v + b'\r\n'
0c265486
YCH
4281 if boundary.encode('ascii') in content:
4282 raise ValueError('Boundary overlaps with data')
4283 out += content
4284
4285 out += b'--' + boundary.encode('ascii') + b'--\r\n'
4286
4287 return out, content_type
4288
4289
4290def multipart_encode(data, boundary=None):
4291 '''
4292 Encode a dict to RFC 7578-compliant form-data
4293
4294 data:
4295 A dict where keys and values can be either Unicode or bytes-like
4296 objects.
4297 boundary:
4298 If specified, it must be a Unicode object and it is used as the boundary. Otherwise
4299 a random boundary is generated.
4300
4301 Reference: https://tools.ietf.org/html/rfc7578
4302 '''
4303 has_specified_boundary = boundary is not None
4304
4305 while True:
4306 if boundary is None:
4307 boundary = '---------------' + str(random.randrange(0x0fffffff, 0xffffffff))
4308
4309 try:
10c87c15 4310 out, content_type = _multipart_encode_impl(data, boundary)
0c265486
YCH
4311 break
4312 except ValueError:
4313 if has_specified_boundary:
4314 raise
4315 boundary = None
4316
4317 return out, content_type
4318
4319
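# Illustrative usage sketch (not part of the original module). With an explicitly
# chosen boundary, multipart_encode() above produces:
#   body, ctype = multipart_encode({'field': 'value'}, boundary='x-boundary')
#   ctype == 'multipart/form-data; boundary=x-boundary'
#   body  == (b'--x-boundary\r\n'
#             b'Content-Disposition: form-data; name="field"\r\n\r\n'
#             b'value\r\n'
#             b'--x-boundary--\r\n')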
86296ad2 4320def dict_get(d, key_or_keys, default=None, skip_false_values=True):
cbecc9b9
S
4321 if isinstance(key_or_keys, (list, tuple)):
4322 for key in key_or_keys:
86296ad2
S
4323 if key not in d or d[key] is None or skip_false_values and not d[key]:
4324 continue
4325 return d[key]
cbecc9b9
S
4326 return default
4327 return d.get(key_or_keys, default)
4328
4329
329ca3be 4330def try_get(src, getter, expected_type=None):
6606817a 4331 for get in variadic(getter):
a32a9a7e
S
4332 try:
4333 v = get(src)
4334 except (AttributeError, KeyError, TypeError, IndexError):
4335 pass
4336 else:
4337 if expected_type is None or isinstance(v, expected_type):
4338 return v
329ca3be
S
4339
4340
6cc62232
S
4341def merge_dicts(*dicts):
4342 merged = {}
4343 for a_dict in dicts:
4344 for k, v in a_dict.items():
4345 if v is None:
4346 continue
3089bc74
S
4347 if (k not in merged
4348 or (isinstance(v, compat_str) and v
4349 and isinstance(merged[k], compat_str)
4350 and not merged[k])):
6cc62232
S
4351 merged[k] = v
4352 return merged
4353
4354
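# Illustrative usage sketch (not part of the original module); expected results
# follow from dict_get(), try_get() and merge_dicts() above:
#   meta = {'like_count': None, 'likes': 0, 'rating': 5}
#   dict_get(meta, ('like_count', 'likes', 'rating'))                 == 5   # falsy values skipped
#   dict_get(meta, ('like_count', 'likes'), skip_false_values=False)  == 0
#   try_get({'a': {'b': [1, 2]}}, lambda x: x['a']['b'][1], int)      == 2
#   try_get({}, lambda x: x['a']['b'], int) is None                         # errors are swallowed
#   merge_dicts({'title': ''}, {'title': 'Video', 'id': None})        == {'title': 'Video'}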
8e60dc75
S
4355def encode_compat_str(string, encoding=preferredencoding(), errors='strict'):
4356 return string if isinstance(string, compat_str) else compat_str(string, encoding, errors)
4357
16392824 4358
a1a530b0
PH
4359US_RATINGS = {
4360 'G': 0,
4361 'PG': 10,
4362 'PG-13': 13,
4363 'R': 16,
4364 'NC': 18,
4365}
fac55558
PH
4366
4367
a8795327 4368TV_PARENTAL_GUIDELINES = {
5a16c9d9
RA
4369 'TV-Y': 0,
4370 'TV-Y7': 7,
4371 'TV-G': 0,
4372 'TV-PG': 0,
4373 'TV-14': 14,
4374 'TV-MA': 17,
a8795327
S
4375}
4376
4377
146c80e2 4378def parse_age_limit(s):
a8795327
S
4379 if type(s) == int:
4380 return s if 0 <= s <= 21 else None
4381 if not isinstance(s, compat_basestring):
d838b1bd 4382 return None
146c80e2 4383 m = re.match(r'^(?P<age>\d{1,2})\+?$', s)
a8795327
S
4384 if m:
4385 return int(m.group('age'))
5c5fae6d 4386 s = s.upper()
a8795327
S
4387 if s in US_RATINGS:
4388 return US_RATINGS[s]
5a16c9d9 4389 m = re.match(r'^TV[_-]?(%s)$' % '|'.join(k[3:] for k in TV_PARENTAL_GUIDELINES), s)
b8361187 4390 if m:
5a16c9d9 4391 return TV_PARENTAL_GUIDELINES['TV-' + m.group(1)]
b8361187 4392 return None
146c80e2
S
4393
4394
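# Illustrative usage sketch (not part of the original module); expected results
# follow from parse_age_limit() above:
#   parse_age_limit(18)       == 18
#   parse_age_limit('18+')    == 18
#   parse_age_limit('PG-13')  == 13    # US movie ratings
#   parse_age_limit('TV-MA')  == 17    # US TV parental guidelines
#   parse_age_limit('banana') is None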
fac55558 4395def strip_jsonp(code):
609a61e3 4396 return re.sub(
5552c9eb 4397 r'''(?sx)^
e9c671d5 4398 (?:window\.)?(?P<func_name>[a-zA-Z0-9_.$]*)
5552c9eb
YCH
4399 (?:\s*&&\s*(?P=func_name))?
4400 \s*\(\s*(?P<callback_data>.*)\);?
4401 \s*?(?://[^\n]*)*$''',
4402 r'\g<callback_data>', code)
478c2c61
PH
4403
4404
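# Illustrative usage sketch (not part of the original module). strip_jsonp() above
# unwraps a JSONP callback so the payload can be json-decoded:
#   strip_jsonp('callback({"status": "ok"});')          == '{"status": "ok"}'
#   strip_jsonp('window.cb && cb({"a": 1}); // done')   == '{"a": 1}'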
5c610515 4405def js_to_json(code, vars={}):
4406 # vars is a dict of var, val pairs to substitute
c843e685 4407 COMMENT_RE = r'/\*(?:(?!\*/).)*?\*/|//[^\n]*\n'
4195096e
S
4408 SKIP_RE = r'\s*(?:{comment})?\s*'.format(comment=COMMENT_RE)
4409 INTEGER_TABLE = (
4410 (r'(?s)^(0[xX][0-9a-fA-F]+){skip}:?$'.format(skip=SKIP_RE), 16),
4411 (r'(?s)^(0+[0-7]+){skip}:?$'.format(skip=SKIP_RE), 8),
4412 )
4413
e05f6939 4414 def fix_kv(m):
e7b6d122
PH
4415 v = m.group(0)
4416 if v in ('true', 'false', 'null'):
4417 return v
421ddcb8
C
4418 elif v in ('undefined', 'void 0'):
4419 return 'null'
8bdd16b4 4420 elif v.startswith('/*') or v.startswith('//') or v.startswith('!') or v == ',':
bd1e4844 4421 return ""
4422
4423 if v[0] in ("'", '"'):
4424 v = re.sub(r'(?s)\\.|"', lambda m: {
e7b6d122 4425 '"': '\\"',
bd1e4844 4426 "\\'": "'",
4427 '\\\n': '',
4428 '\\x': '\\u00',
4429 }.get(m.group(0), m.group(0)), v[1:-1])
8bdd16b4 4430 else:
4431 for regex, base in INTEGER_TABLE:
4432 im = re.match(regex, v)
4433 if im:
4434 i = int(im.group(1), base)
4435 return '"%d":' % i if v.endswith(':') else '%d' % i
89ac4a19 4436
5c610515 4437 if v in vars:
4438 return vars[v]
4439
e7b6d122 4440 return '"%s"' % v
e05f6939 4441
bd1e4844 4442 return re.sub(r'''(?sx)
4443 "(?:[^"\\]*(?:\\\\|\\['"nurtbfx/\n]))*[^"\\]*"|
4444 '(?:[^'\\]*(?:\\\\|\\['"nurtbfx/\n]))*[^'\\]*'|
4195096e 4445 {comment}|,(?={skip}[\]}}])|
421ddcb8 4446 void\s0|(?:(?<![0-9])[eE]|[a-df-zA-DF-Z_$])[.a-zA-Z_$0-9]*|
4195096e 4447 \b(?:0[xX][0-9a-fA-F]+|0+[0-7]+)(?:{skip}:)?|
8bdd16b4 4448 [0-9]+(?={skip}:)|
4449 !+
4195096e 4450 '''.format(comment=COMMENT_RE, skip=SKIP_RE), fix_kv, code)
e05f6939
PH
4451
4452
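# Illustrative usage sketch (not part of the original module). js_to_json() above
# turns JavaScript-ish object literals into strict JSON:
#   js_to_json("{foo: 'bar', baz: 0x10}")          == '{"foo": "bar", "baz": 16}'
#   js_to_json('{duration: d}', vars={'d': '42'})  == '{"duration": 42}'
#   json.loads(js_to_json('{a: undefined, /* comment */ b: 1,}')) == {'a': None, 'b': 1}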
478c2c61
PH
4453def qualities(quality_ids):
4454 """ Get a numeric quality value out of a list of possible values """
4455 def q(qid):
4456 try:
4457 return quality_ids.index(qid)
4458 except ValueError:
4459 return -1
4460 return q
4461
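# Illustrative usage sketch (not part of the original module). qualities() above
# maps a quality id to its position in the preference list:
#   q = qualities(['240p', '360p', '720p'])
#   q('720p')  == 2
#   q('240p')  == 0
#   q('4320p') == -1   # unknown ids sort below everything else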
acd69589 4462
de6000d9 4463DEFAULT_OUTTMPL = {
4464 'default': '%(title)s [%(id)s].%(ext)s',
72755351 4465 'chapter': '%(title)s - %(section_number)03d %(section_title)s [%(id)s].%(ext)s',
de6000d9 4466}
4467OUTTMPL_TYPES = {
72755351 4468 'chapter': None,
de6000d9 4469 'subtitle': None,
4470 'thumbnail': None,
4471 'description': 'description',
4472 'annotation': 'annotations.xml',
4473 'infojson': 'info.json',
5112f26a 4474 'pl_thumbnail': None,
de6000d9 4475 'pl_description': 'description',
4476 'pl_infojson': 'info.json',
4477}
0a871f68 4478
143db31d 4479# As of [1] format syntax is:
4480# %[mapping_key][conversion_flags][minimum_width][.precision][length_modifier]type
4481# 1. https://docs.python.org/2/library/stdtypes.html#string-formatting
901130bb 4482STR_FORMAT_RE_TMPL = r'''(?x)
4483 (?<!%)(?P<prefix>(?:%%)*)
143db31d 4484 %
524e2e4f 4485 (?P<has_key>\((?P<key>{0})\))?
752cda38 4486 (?P<format>
524e2e4f 4487 (?P<conversion>[#0\-+ ]+)?
4488 (?P<min_width>\d+)?
4489 (?P<precision>\.\d+)?
4490 (?P<len_mod>[hlL])? # unused in python
901130bb 4491 {1} # conversion type
752cda38 4492 )
143db31d 4493'''
4494
7d1eb38a 4495
901130bb 4496STR_FORMAT_TYPES = 'diouxXeEfFgGcrs'
a020a0dc 4497
7d1eb38a 4498
a020a0dc
PH
4499def limit_length(s, length):
4500 """ Add ellipses to overly long strings """
4501 if s is None:
4502 return None
4503 ELLIPSES = '...'
4504 if len(s) > length:
4505 return s[:length - len(ELLIPSES)] + ELLIPSES
4506 return s
48844745
PH
4507
4508
4509def version_tuple(v):
5f9b8394 4510 return tuple(int(e) for e in re.split(r'[-.]', v))
48844745
PH
4511
4512
4513def is_outdated_version(version, limit, assume_new=True):
4514 if not version:
4515 return not assume_new
4516 try:
4517 return version_tuple(version) < version_tuple(limit)
4518 except ValueError:
4519 return not assume_new
732ea2f0
PH
4520
4521
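# Illustrative usage sketch (not part of the original module); expected results
# follow from version_tuple() and is_outdated_version() above:
#   version_tuple('2021.12.01')                      == (2021, 12, 1)
#   is_outdated_version('2021.01.01', '2021.02.01')  is True
#   is_outdated_version(None, '2021.02.01')          is False   # assume_new defaults to True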
4522def ytdl_is_updateable():
7a5c1cfe 4523 """ Returns whether yt-dlp can be updated with -U """
735d865e 4524 return False
4525
732ea2f0
PH
4526 from zipimport import zipimporter
4527
4528 return isinstance(globals().get('__loader__'), zipimporter) or hasattr(sys, 'frozen')
7d4111ed
PH
4529
4530
4531def args_to_str(args):
4532 # Get a short string representation for a subprocess command
702ccf2d 4533 return ' '.join(compat_shlex_quote(a) for a in args)
2ccd1b10
PH
4534
4535
9b9c5355 4536def error_to_compat_str(err):
fdae2358
S
4537 err_str = str(err)
4538 # On python 2 error byte string must be decoded with proper
4539 # encoding rather than ascii
4540 if sys.version_info[0] < 3:
4541 err_str = err_str.decode(preferredencoding())
4542 return err_str
4543
4544
c460bdd5 4545def mimetype2ext(mt):
eb9ee194
S
4546 if mt is None:
4547 return None
4548
9359f3d4
F
4549 mt, _, params = mt.partition(';')
4550 mt = mt.strip()
4551
4552 FULL_MAP = {
765ac263 4553 'audio/mp4': 'm4a',
6c33d24b
YCH
4554 # Per RFC 3003, audio/mpeg can be .mp1, .mp2 or .mp3. Here use .mp3 as
4555 # it's the most popular one
4556 'audio/mpeg': 'mp3',
ba39289d 4557 'audio/x-wav': 'wav',
9359f3d4
F
4558 'audio/wav': 'wav',
4559 'audio/wave': 'wav',
4560 }
4561
4562 ext = FULL_MAP.get(mt)
765ac263
JMF
4563 if ext is not None:
4564 return ext
4565
9359f3d4 4566 SUBTYPE_MAP = {
f6861ec9 4567 '3gpp': '3gp',
cafcf657 4568 'smptett+xml': 'tt',
cafcf657 4569 'ttaf+xml': 'dfxp',
a0d8d704 4570 'ttml+xml': 'ttml',
f6861ec9 4571 'x-flv': 'flv',
a0d8d704 4572 'x-mp4-fragmented': 'mp4',
d4f05d47 4573 'x-ms-sami': 'sami',
a0d8d704 4574 'x-ms-wmv': 'wmv',
b4173f15
RA
4575 'mpegurl': 'm3u8',
4576 'x-mpegurl': 'm3u8',
4577 'vnd.apple.mpegurl': 'm3u8',
4578 'dash+xml': 'mpd',
b4173f15 4579 'f4m+xml': 'f4m',
f164b971 4580 'hds+xml': 'f4m',
e910fe2f 4581 'vnd.ms-sstr+xml': 'ism',
c2b2c7e1 4582 'quicktime': 'mov',
98ce1a3f 4583 'mp2t': 'ts',
39e7107d 4584 'x-wav': 'wav',
9359f3d4
F
4585 'filmstrip+json': 'fs',
4586 'svg+xml': 'svg',
4587 }
4588
4589 _, _, subtype = mt.rpartition('/')
4590 ext = SUBTYPE_MAP.get(subtype.lower())
4591 if ext is not None:
4592 return ext
4593
4594 SUFFIX_MAP = {
4595 'json': 'json',
4596 'xml': 'xml',
4597 'zip': 'zip',
4598 'gzip': 'gz',
4599 }
4600
4601 _, _, suffix = subtype.partition('+')
4602 ext = SUFFIX_MAP.get(suffix)
4603 if ext is not None:
4604 return ext
4605
4606 return subtype.replace('+', '.')
c460bdd5
PH
4607
4608
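# Illustrative usage sketch (not part of the original module; the MIME types below
# are just sample inputs). mimetype2ext() above tries the full type, then the
# subtype, then the '+suffix', then falls back to the subtype itself:
#   mimetype2ext('audio/mp4')                 == 'm4a'
#   mimetype2ext('application/x-mpegURL')     == 'm3u8'
#   mimetype2ext('application/dash+xml')      == 'mpd'
#   mimetype2ext('application/vnd.foo+json')  == 'json'
#   mimetype2ext('video/mp4; codecs="avc1"')  == 'mp4'   # parameters are ignored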
4f3c5e06 4609def parse_codecs(codecs_str):
4610 # http://tools.ietf.org/html/rfc6381
4611 if not codecs_str:
4612 return {}
a0566bbf 4613 split_codecs = list(filter(None, map(
dbf5416a 4614 str.strip, codecs_str.strip().strip(',').split(','))))
4f3c5e06 4615 vcodec, acodec = None, None
a0566bbf 4616 for full_codec in split_codecs:
4f3c5e06 4617 codec = full_codec.split('.')[0]
28cc2241 4618 if codec in ('avc1', 'avc2', 'avc3', 'avc4', 'vp9', 'vp8', 'hev1', 'hev2', 'h263', 'h264', 'mp4v', 'hvc1', 'av01', 'theora'):
4f3c5e06 4619 if not vcodec:
4620 vcodec = full_codec
60f5c9fb 4621 elif codec in ('mp4a', 'opus', 'vorbis', 'mp3', 'aac', 'ac-3', 'ec-3', 'eac3', 'dtsc', 'dtse', 'dtsh', 'dtsl'):
4f3c5e06 4622 if not acodec:
4623 acodec = full_codec
4624 else:
60f5c9fb 4625 write_string('WARNING: Unknown codec %s\n' % full_codec, sys.stderr)
4f3c5e06 4626 if not vcodec and not acodec:
a0566bbf 4627 if len(split_codecs) == 2:
4f3c5e06 4628 return {
a0566bbf 4629 'vcodec': split_codecs[0],
4630 'acodec': split_codecs[1],
4f3c5e06 4631 }
4632 else:
4633 return {
4634 'vcodec': vcodec or 'none',
4635 'acodec': acodec or 'none',
4636 }
4637 return {}
4638
4639
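# Illustrative usage sketch (not part of the original module). parse_codecs() above
# splits an RFC 6381 codecs string into video and audio codecs:
#   parse_codecs('avc1.64001f, mp4a.40.2') == {'vcodec': 'avc1.64001f', 'acodec': 'mp4a.40.2'}
#   parse_codecs('opus')                   == {'vcodec': 'none', 'acodec': 'opus'}
#   parse_codecs('')                       == {}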
2ccd1b10 4640def urlhandle_detect_ext(url_handle):
79298173 4641 getheader = url_handle.headers.get
2ccd1b10 4642
b55ee18f
PH
4643 cd = getheader('Content-Disposition')
4644 if cd:
4645 m = re.match(r'attachment;\s*filename="(?P<filename>[^"]+)"', cd)
4646 if m:
4647 e = determine_ext(m.group('filename'), default_ext=None)
4648 if e:
4649 return e
4650
c460bdd5 4651 return mimetype2ext(getheader('Content-Type'))
05900629
PH
4652
4653
1e399778
YCH
4654def encode_data_uri(data, mime_type):
4655 return 'data:%s;base64,%s' % (mime_type, base64.b64encode(data).decode('ascii'))
4656
4657
05900629 4658def age_restricted(content_limit, age_limit):
6ec6cb4e 4659 """ Returns True iff the content should be blocked """
05900629
PH
4660
4661 if age_limit is None: # No limit set
4662 return False
4663 if content_limit is None:
4664 return False # Content available for everyone
4665 return age_limit < content_limit
61ca9a80
PH
4666
4667
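# Illustrative usage sketch (not part of the original module); expected results
# follow from encode_data_uri() and age_restricted() above:
#   encode_data_uri(b'hello', 'text/plain')  == 'data:text/plain;base64,aGVsbG8='
#   age_restricted(18, age_limit=16)         is True    # 18+ content, 16+ viewer: blocked
#   age_restricted(None, age_limit=16)       is False   # no content limit: available to everyone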
4668def is_html(first_bytes):
4669 """ Detect whether a file contains HTML by examining its first bytes. """
4670
4671 BOMS = [
4672 (b'\xef\xbb\xbf', 'utf-8'),
4673 (b'\x00\x00\xfe\xff', 'utf-32-be'),
4674 (b'\xff\xfe\x00\x00', 'utf-32-le'),
4675 (b'\xff\xfe', 'utf-16-le'),
4676 (b'\xfe\xff', 'utf-16-be'),
4677 ]
4678 for bom, enc in BOMS:
4679 if first_bytes.startswith(bom):
4680 s = first_bytes[len(bom):].decode(enc, 'replace')
4681 break
4682 else:
4683 s = first_bytes.decode('utf-8', 'replace')
4684
4685 return re.match(r'^\s*<', s)
a055469f
PH
4686
4687
4688def determine_protocol(info_dict):
4689 protocol = info_dict.get('protocol')
4690 if protocol is not None:
4691 return protocol
4692
4693 url = info_dict['url']
4694 if url.startswith('rtmp'):
4695 return 'rtmp'
4696 elif url.startswith('mms'):
4697 return 'mms'
4698 elif url.startswith('rtsp'):
4699 return 'rtsp'
4700
4701 ext = determine_ext(url)
4702 if ext == 'm3u8':
4703 return 'm3u8'
4704 elif ext == 'f4m':
4705 return 'f4m'
4706
4707 return compat_urllib_parse_urlparse(url).scheme
cfb56d1a
PH
4708
4709
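# Illustrative usage sketch (not part of the original module); expected results
# follow from determine_protocol() above:
#   determine_protocol({'url': 'rtmp://example.com/live'})               == 'rtmp'
#   determine_protocol({'url': 'https://example.com/master.m3u8'})       == 'm3u8'
#   determine_protocol({'url': 'https://example.com/video.mp4'})         == 'https'
#   determine_protocol({'url': 'https://x', 'protocol': 'm3u8_native'})  == 'm3u8_native'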
76d321f6 4710def render_table(header_row, data, delim=False, extraGap=0, hideEmpty=False):
cfb56d1a 4711 """ Render a list of rows, each as a list of values """
76d321f6 4712
4713 def get_max_lens(table):
4714 return [max(len(compat_str(v)) for v in col) for col in zip(*table)]
4715
4716 def filter_using_list(row, filterArray):
4717 return [col for (take, col) in zip(filterArray, row) if take]
4718
4719 if hideEmpty:
4720 max_lens = get_max_lens(data)
4721 header_row = filter_using_list(header_row, max_lens)
4722 data = [filter_using_list(row, max_lens) for row in data]
4723
cfb56d1a 4724 table = [header_row] + data
76d321f6 4725 max_lens = get_max_lens(table)
4726 if delim:
4727 table = [header_row] + [['-' * ml for ml in max_lens]] + data
4728 format_str = ' '.join('%-' + compat_str(ml + extraGap) + 's' for ml in max_lens[:-1]) + ' %s'
cfb56d1a 4729 return '\n'.join(format_str % tuple(row) for row in table)
347de493
PH
4730
4731
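# Illustrative usage sketch (not part of the original module). render_table() above
# left-aligns every column to its widest cell:
#   render_table(['format', 'ext'], [['248', 'webm'], ['137', 'mp4']])
#       == 'format ext\n248    webm\n137    mp4'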
8f18aca8 4732def _match_one(filter_part, dct, incomplete):
77b87f05 4733 # TODO: Generalize code with YoutubeDL._build_format_filter
a047eeb6 4734 STRING_OPERATORS = {
4735 '*=': operator.contains,
4736 '^=': lambda attr, value: attr.startswith(value),
4737 '$=': lambda attr, value: attr.endswith(value),
4738 '~=': lambda attr, value: re.search(value, attr),
4739 }
347de493 4740 COMPARISON_OPERATORS = {
a047eeb6 4741 **STRING_OPERATORS,
4742 '<=': operator.le, # "<=" must be defined above "<"
347de493 4743 '<': operator.lt,
347de493 4744 '>=': operator.ge,
a047eeb6 4745 '>': operator.gt,
347de493 4746 '=': operator.eq,
347de493 4747 }
a047eeb6 4748
347de493
PH
4749 operator_rex = re.compile(r'''(?x)\s*
4750 (?P<key>[a-z_]+)
77b87f05 4751 \s*(?P<negation>!\s*)?(?P<op>%s)(?P<none_inclusive>\s*\?)?\s*
347de493
PH
4752 (?:
4753 (?P<intval>[0-9.]+(?:[kKmMgGtTpPeEzZyY]i?[Bb]?)?)|
a047eeb6 4754 (?P<quote>["\'])(?P<quotedstrval>.+?)(?P=quote)|
4755 (?P<strval>.+?)
347de493
PH
4756 )
4757 \s*$
4758 ''' % '|'.join(map(re.escape, COMPARISON_OPERATORS.keys())))
4759 m = operator_rex.search(filter_part)
4760 if m:
77b87f05
MT
4761 unnegated_op = COMPARISON_OPERATORS[m.group('op')]
4762 if m.group('negation'):
4763 op = lambda attr, value: not unnegated_op(attr, value)
4764 else:
4765 op = unnegated_op
e5a088dc 4766 actual_value = dct.get(m.group('key'))
3089bc74
S
4767 if (m.group('quotedstrval') is not None
4768 or m.group('strval') is not None
e5a088dc
S
4769 # If the original field is a string and matching comparisonvalue is
4770 # a number we should respect the origin of the original field
4771 # and process comparison value as a string (see
067aa17e 4772 # https://github.com/ytdl-org/youtube-dl/issues/11082).
3089bc74
S
4773 or actual_value is not None and m.group('intval') is not None
4774 and isinstance(actual_value, compat_str)):
db13c16e
S
4775 comparison_value = m.group('quotedstrval') or m.group('strval') or m.group('intval')
4776 quote = m.group('quote')
4777 if quote is not None:
4778 comparison_value = comparison_value.replace(r'\%s' % quote, quote)
347de493 4779 else:
a047eeb6 4780 if m.group('op') in STRING_OPERATORS:
4781 raise ValueError('Operator %s only supports string values!' % m.group('op'))
347de493
PH
4782 try:
4783 comparison_value = int(m.group('intval'))
4784 except ValueError:
4785 comparison_value = parse_filesize(m.group('intval'))
4786 if comparison_value is None:
4787 comparison_value = parse_filesize(m.group('intval') + 'B')
4788 if comparison_value is None:
4789 raise ValueError(
4790 'Invalid integer value %r in filter part %r' % (
4791 m.group('intval'), filter_part))
347de493 4792 if actual_value is None:
8f18aca8 4793 return incomplete or m.group('none_inclusive')
347de493
PH
4794 return op(actual_value, comparison_value)
4795
4796 UNARY_OPERATORS = {
1cc47c66
S
4797 '': lambda v: (v is True) if isinstance(v, bool) else (v is not None),
4798 '!': lambda v: (v is False) if isinstance(v, bool) else (v is None),
347de493
PH
4799 }
4800 operator_rex = re.compile(r'''(?x)\s*
4801 (?P<op>%s)\s*(?P<key>[a-z_]+)
4802 \s*$
4803 ''' % '|'.join(map(re.escape, UNARY_OPERATORS.keys())))
4804 m = operator_rex.search(filter_part)
4805 if m:
4806 op = UNARY_OPERATORS[m.group('op')]
4807 actual_value = dct.get(m.group('key'))
8f18aca8 4808 if incomplete and actual_value is None:
4809 return True
347de493
PH
4810 return op(actual_value)
4811
4812 raise ValueError('Invalid filter part %r' % filter_part)
4813
4814
8f18aca8 4815def match_str(filter_str, dct, incomplete=False):
4816 """ Filter a dictionary with a simple string syntax. Returns True (=passes filter) or false
4817 When incomplete, all conditions passes on missing fields
4818 """
347de493 4819 return all(
8f18aca8 4820 _match_one(filter_part.replace(r'\&', '&'), dct, incomplete)
a047eeb6 4821 for filter_part in re.split(r'(?<!\\)&', filter_str))
347de493
PH
4822
4823
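# Illustrative usage sketch (not part of the original module). In match_str() above,
# '&' separates conditions and a trailing '?' on an operator also lets the condition
# pass when the field is missing:
#   match_str('like_count > 100 & dislike_count <? 50',
#             {'like_count': 190, 'dislike_count': None})                      is True
#   match_str('!is_live & duration >= 60', {'is_live': False, 'duration': 90}) is True
#   match_str('title *= trailer', {'title': 'Official Teaser'})                is False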
4824def match_filter_func(filter_str):
8f18aca8 4825 def _match_func(info_dict, *args, **kwargs):
4826 if match_str(filter_str, info_dict, *args, **kwargs):
347de493
PH
4827 return None
4828 else:
4829 video_title = info_dict.get('title', info_dict.get('id', 'video'))
4830 return '%s does not pass filter %s, skipping ..' % (video_title, filter_str)
4831 return _match_func
91410c9b
PH
4832
4833
bf6427d2
YCH
4834def parse_dfxp_time_expr(time_expr):
4835 if not time_expr:
d631d5f9 4836 return
bf6427d2
YCH
4837
4838 mobj = re.match(r'^(?P<time_offset>\d+(?:\.\d+)?)s?$', time_expr)
4839 if mobj:
4840 return float(mobj.group('time_offset'))
4841
db2fe38b 4842 mobj = re.match(r'^(\d+):(\d\d):(\d\d(?:(?:\.|:)\d+)?)$', time_expr)
bf6427d2 4843 if mobj:
db2fe38b 4844 return 3600 * int(mobj.group(1)) + 60 * int(mobj.group(2)) + float(mobj.group(3).replace(':', '.'))
bf6427d2
YCH
4845
4846
c1c924ab
YCH
4847def srt_subtitles_timecode(seconds):
4848 return '%02d:%02d:%02d,%03d' % (seconds / 3600, (seconds % 3600) / 60, seconds % 60, (seconds % 1) * 1000)
bf6427d2
YCH
4849
4850
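# Illustrative usage sketch (not part of the original module); expected results
# follow from parse_dfxp_time_expr() and srt_subtitles_timecode() above:
#   parse_dfxp_time_expr('5.2s')        == 5.2
#   parse_dfxp_time_expr('00:01:02.5')  == 62.5
#   srt_subtitles_timecode(3661.5)      == '01:01:01,500'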
4851def dfxp2srt(dfxp_data):
3869028f
YCH
4852 '''
4853 @param dfxp_data A bytes-like object containing DFXP data
4854 @returns A unicode object containing converted SRT data
4855 '''
5b995f71 4856 LEGACY_NAMESPACES = (
3869028f
YCH
4857 (b'http://www.w3.org/ns/ttml', [
4858 b'http://www.w3.org/2004/11/ttaf1',
4859 b'http://www.w3.org/2006/04/ttaf1',
4860 b'http://www.w3.org/2006/10/ttaf1',
5b995f71 4861 ]),
3869028f
YCH
4862 (b'http://www.w3.org/ns/ttml#styling', [
4863 b'http://www.w3.org/ns/ttml#style',
5b995f71
RA
4864 ]),
4865 )
4866
4867 SUPPORTED_STYLING = [
4868 'color',
4869 'fontFamily',
4870 'fontSize',
4871 'fontStyle',
4872 'fontWeight',
4873 'textDecoration'
4874 ]
4875
4e335771 4876 _x = functools.partial(xpath_with_ns, ns_map={
261f4730 4877 'xml': 'http://www.w3.org/XML/1998/namespace',
4e335771 4878 'ttml': 'http://www.w3.org/ns/ttml',
5b995f71 4879 'tts': 'http://www.w3.org/ns/ttml#styling',
4e335771 4880 })
bf6427d2 4881
5b995f71
RA
4882 styles = {}
4883 default_style = {}
4884
87de7069 4885 class TTMLPElementParser(object):
5b995f71
RA
4886 _out = ''
4887 _unclosed_elements = []
4888 _applied_styles = []
bf6427d2 4889
2b14cb56 4890 def start(self, tag, attrib):
5b995f71
RA
4891 if tag in (_x('ttml:br'), 'br'):
4892 self._out += '\n'
4893 else:
4894 unclosed_elements = []
4895 style = {}
4896 element_style_id = attrib.get('style')
4897 if default_style:
4898 style.update(default_style)
4899 if element_style_id:
4900 style.update(styles.get(element_style_id, {}))
4901 for prop in SUPPORTED_STYLING:
4902 prop_val = attrib.get(_x('tts:' + prop))
4903 if prop_val:
4904 style[prop] = prop_val
4905 if style:
4906 font = ''
4907 for k, v in sorted(style.items()):
4908 if self._applied_styles and self._applied_styles[-1].get(k) == v:
4909 continue
4910 if k == 'color':
4911 font += ' color="%s"' % v
4912 elif k == 'fontSize':
4913 font += ' size="%s"' % v
4914 elif k == 'fontFamily':
4915 font += ' face="%s"' % v
4916 elif k == 'fontWeight' and v == 'bold':
4917 self._out += '<b>'
4918 unclosed_elements.append('b')
4919 elif k == 'fontStyle' and v == 'italic':
4920 self._out += '<i>'
4921 unclosed_elements.append('i')
4922 elif k == 'textDecoration' and v == 'underline':
4923 self._out += '<u>'
4924 unclosed_elements.append('u')
4925 if font:
4926 self._out += '<font' + font + '>'
4927 unclosed_elements.append('font')
4928 applied_style = {}
4929 if self._applied_styles:
4930 applied_style.update(self._applied_styles[-1])
4931 applied_style.update(style)
4932 self._applied_styles.append(applied_style)
4933 self._unclosed_elements.append(unclosed_elements)
bf6427d2 4934
2b14cb56 4935 def end(self, tag):
5b995f71
RA
4936 if tag not in (_x('ttml:br'), 'br'):
4937 unclosed_elements = self._unclosed_elements.pop()
4938 for element in reversed(unclosed_elements):
4939 self._out += '</%s>' % element
4940 if unclosed_elements and self._applied_styles:
4941 self._applied_styles.pop()
bf6427d2 4942
2b14cb56 4943 def data(self, data):
5b995f71 4944 self._out += data
2b14cb56 4945
4946 def close(self):
5b995f71 4947 return self._out.strip()
2b14cb56 4948
4949 def parse_node(node):
4950 target = TTMLPElementParser()
4951 parser = xml.etree.ElementTree.XMLParser(target=target)
4952 parser.feed(xml.etree.ElementTree.tostring(node))
4953 return parser.close()
bf6427d2 4954
5b995f71
RA
4955 for k, v in LEGACY_NAMESPACES:
4956 for ns in v:
4957 dfxp_data = dfxp_data.replace(ns, k)
4958
3869028f 4959 dfxp = compat_etree_fromstring(dfxp_data)
bf6427d2 4960 out = []
5b995f71 4961 paras = dfxp.findall(_x('.//ttml:p')) or dfxp.findall('.//p')
1b0427e6
YCH
4962
4963 if not paras:
4964 raise ValueError('Invalid dfxp/TTML subtitle')
bf6427d2 4965
5b995f71
RA
4966 repeat = False
4967 while True:
4968 for style in dfxp.findall(_x('.//ttml:style')):
261f4730
RA
4969 style_id = style.get('id') or style.get(_x('xml:id'))
4970 if not style_id:
4971 continue
5b995f71
RA
4972 parent_style_id = style.get('style')
4973 if parent_style_id:
4974 if parent_style_id not in styles:
4975 repeat = True
4976 continue
4977 styles[style_id] = styles[parent_style_id].copy()
4978 for prop in SUPPORTED_STYLING:
4979 prop_val = style.get(_x('tts:' + prop))
4980 if prop_val:
4981 styles.setdefault(style_id, {})[prop] = prop_val
4982 if repeat:
4983 repeat = False
4984 else:
4985 break
4986
4987 for p in ('body', 'div'):
4988 ele = xpath_element(dfxp, [_x('.//ttml:' + p), './/' + p])
4989 if ele is None:
4990 continue
4991 style = styles.get(ele.get('style'))
4992 if not style:
4993 continue
4994 default_style.update(style)
4995
bf6427d2 4996 for para, index in zip(paras, itertools.count(1)):
d631d5f9 4997 begin_time = parse_dfxp_time_expr(para.attrib.get('begin'))
7dff0363 4998 end_time = parse_dfxp_time_expr(para.attrib.get('end'))
d631d5f9
YCH
4999 dur = parse_dfxp_time_expr(para.attrib.get('dur'))
5000 if begin_time is None:
5001 continue
7dff0363 5002 if not end_time:
d631d5f9
YCH
5003 if not dur:
5004 continue
5005 end_time = begin_time + dur
bf6427d2
YCH
5006 out.append('%d\n%s --> %s\n%s\n\n' % (
5007 index,
c1c924ab
YCH
5008 srt_subtitles_timecode(begin_time),
5009 srt_subtitles_timecode(end_time),
bf6427d2
YCH
5010 parse_node(para)))
5011
5012 return ''.join(out)
5013
5014
66e289ba
S
5015def cli_option(params, command_option, param):
5016 param = params.get(param)
98e698f1
RA
5017 if param:
5018 param = compat_str(param)
66e289ba
S
5019 return [command_option, param] if param is not None else []
5020
5021
5022def cli_bool_option(params, command_option, param, true_value='true', false_value='false', separator=None):
5023 param = params.get(param)
5b232f46
S
5024 if param is None:
5025 return []
66e289ba
S
5026 assert isinstance(param, bool)
5027 if separator:
5028 return [command_option + separator + (true_value if param else false_value)]
5029 return [command_option, true_value if param else false_value]
5030
5031
5032def cli_valueless_option(params, command_option, param, expected_value=True):
5033 param = params.get(param)
5034 return [command_option] if param == expected_value else []
5035
5036
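# Illustrative usage sketch (not part of the original module); expected results
# follow from the cli_*option() helpers above:
#   cli_option({'proxy': 'socks5://127.0.0.1:1080'}, '--proxy', 'proxy')
#       == ['--proxy', 'socks5://127.0.0.1:1080']
#   cli_option({}, '--proxy', 'proxy')  == []
#   cli_bool_option({'nocheckcertificate': True}, '--no-check-certificate', 'nocheckcertificate')
#       == ['--no-check-certificate', 'true']
#   cli_bool_option({'nocheckcertificate': False}, '--check-certificate', 'nocheckcertificate',
#                   'false', 'true', '=')  == ['--check-certificate=true']
#   cli_valueless_option({'quiet': True}, '--quiet', 'quiet')  == ['--quiet']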
e92caff5 5037def cli_configuration_args(argdict, keys, default=[], use_compat=True):
eab9b2bc 5038 if isinstance(argdict, (list, tuple)): # for backward compatibility
e92caff5 5039 if use_compat:
5b1ecbb3 5040 return argdict
5041 else:
5042 argdict = None
eab9b2bc 5043 if argdict is None:
5b1ecbb3 5044 return default
eab9b2bc 5045 assert isinstance(argdict, dict)
5046
e92caff5 5047 assert isinstance(keys, (list, tuple))
5048 for key_list in keys:
e92caff5 5049 arg_list = list(filter(
5050 lambda x: x is not None,
6606817a 5051 [argdict.get(key.lower()) for key in variadic(key_list)]))
e92caff5 5052 if arg_list:
5053 return [arg for args in arg_list for arg in args]
5054 return default
66e289ba 5055
6251555f 5056
330690a2 5057def _configuration_args(main_key, argdict, exe, keys=None, default=[], use_compat=True):
5058 main_key, exe = main_key.lower(), exe.lower()
5059 root_key = exe if main_key == exe else f'{main_key}+{exe}'
5060 keys = [f'{root_key}{k}' for k in (keys or [''])]
5061 if root_key in keys:
5062 if main_key != exe:
5063 keys.append((main_key, exe))
5064 keys.append('default')
5065 else:
5066 use_compat = False
5067 return cli_configuration_args(argdict, keys, default, use_compat)
5068
66e289ba 5069
39672624
YCH
5070class ISO639Utils(object):
5071 # See http://www.loc.gov/standards/iso639-2/ISO-639-2_utf-8.txt
5072 _lang_map = {
5073 'aa': 'aar',
5074 'ab': 'abk',
5075 'ae': 'ave',
5076 'af': 'afr',
5077 'ak': 'aka',
5078 'am': 'amh',
5079 'an': 'arg',
5080 'ar': 'ara',
5081 'as': 'asm',
5082 'av': 'ava',
5083 'ay': 'aym',
5084 'az': 'aze',
5085 'ba': 'bak',
5086 'be': 'bel',
5087 'bg': 'bul',
5088 'bh': 'bih',
5089 'bi': 'bis',
5090 'bm': 'bam',
5091 'bn': 'ben',
5092 'bo': 'bod',
5093 'br': 'bre',
5094 'bs': 'bos',
5095 'ca': 'cat',
5096 'ce': 'che',
5097 'ch': 'cha',
5098 'co': 'cos',
5099 'cr': 'cre',
5100 'cs': 'ces',
5101 'cu': 'chu',
5102 'cv': 'chv',
5103 'cy': 'cym',
5104 'da': 'dan',
5105 'de': 'deu',
5106 'dv': 'div',
5107 'dz': 'dzo',
5108 'ee': 'ewe',
5109 'el': 'ell',
5110 'en': 'eng',
5111 'eo': 'epo',
5112 'es': 'spa',
5113 'et': 'est',
5114 'eu': 'eus',
5115 'fa': 'fas',
5116 'ff': 'ful',
5117 'fi': 'fin',
5118 'fj': 'fij',
5119 'fo': 'fao',
5120 'fr': 'fra',
5121 'fy': 'fry',
5122 'ga': 'gle',
5123 'gd': 'gla',
5124 'gl': 'glg',
5125 'gn': 'grn',
5126 'gu': 'guj',
5127 'gv': 'glv',
5128 'ha': 'hau',
5129 'he': 'heb',
b7acc835 5130 'iw': 'heb', # Replaced by he in 1989 revision
39672624
YCH
5131 'hi': 'hin',
5132 'ho': 'hmo',
5133 'hr': 'hrv',
5134 'ht': 'hat',
5135 'hu': 'hun',
5136 'hy': 'hye',
5137 'hz': 'her',
5138 'ia': 'ina',
5139 'id': 'ind',
b7acc835 5140 'in': 'ind', # Replaced by id in 1989 revision
39672624
YCH
5141 'ie': 'ile',
5142 'ig': 'ibo',
5143 'ii': 'iii',
5144 'ik': 'ipk',
5145 'io': 'ido',
5146 'is': 'isl',
5147 'it': 'ita',
5148 'iu': 'iku',
5149 'ja': 'jpn',
5150 'jv': 'jav',
5151 'ka': 'kat',
5152 'kg': 'kon',
5153 'ki': 'kik',
5154 'kj': 'kua',
5155 'kk': 'kaz',
5156 'kl': 'kal',
5157 'km': 'khm',
5158 'kn': 'kan',
5159 'ko': 'kor',
5160 'kr': 'kau',
5161 'ks': 'kas',
5162 'ku': 'kur',
5163 'kv': 'kom',
5164 'kw': 'cor',
5165 'ky': 'kir',
5166 'la': 'lat',
5167 'lb': 'ltz',
5168 'lg': 'lug',
5169 'li': 'lim',
5170 'ln': 'lin',
5171 'lo': 'lao',
5172 'lt': 'lit',
5173 'lu': 'lub',
5174 'lv': 'lav',
5175 'mg': 'mlg',
5176 'mh': 'mah',
5177 'mi': 'mri',
5178 'mk': 'mkd',
5179 'ml': 'mal',
5180 'mn': 'mon',
5181 'mr': 'mar',
5182 'ms': 'msa',
5183 'mt': 'mlt',
5184 'my': 'mya',
5185 'na': 'nau',
5186 'nb': 'nob',
5187 'nd': 'nde',
5188 'ne': 'nep',
5189 'ng': 'ndo',
5190 'nl': 'nld',
5191 'nn': 'nno',
5192 'no': 'nor',
5193 'nr': 'nbl',
5194 'nv': 'nav',
5195 'ny': 'nya',
5196 'oc': 'oci',
5197 'oj': 'oji',
5198 'om': 'orm',
5199 'or': 'ori',
5200 'os': 'oss',
5201 'pa': 'pan',
5202 'pi': 'pli',
5203 'pl': 'pol',
5204 'ps': 'pus',
5205 'pt': 'por',
5206 'qu': 'que',
5207 'rm': 'roh',
5208 'rn': 'run',
5209 'ro': 'ron',
5210 'ru': 'rus',
5211 'rw': 'kin',
5212 'sa': 'san',
5213 'sc': 'srd',
5214 'sd': 'snd',
5215 'se': 'sme',
5216 'sg': 'sag',
5217 'si': 'sin',
5218 'sk': 'slk',
5219 'sl': 'slv',
5220 'sm': 'smo',
5221 'sn': 'sna',
5222 'so': 'som',
5223 'sq': 'sqi',
5224 'sr': 'srp',
5225 'ss': 'ssw',
5226 'st': 'sot',
5227 'su': 'sun',
5228 'sv': 'swe',
5229 'sw': 'swa',
5230 'ta': 'tam',
5231 'te': 'tel',
5232 'tg': 'tgk',
5233 'th': 'tha',
5234 'ti': 'tir',
5235 'tk': 'tuk',
5236 'tl': 'tgl',
5237 'tn': 'tsn',
5238 'to': 'ton',
5239 'tr': 'tur',
5240 'ts': 'tso',
5241 'tt': 'tat',
5242 'tw': 'twi',
5243 'ty': 'tah',
5244 'ug': 'uig',
5245 'uk': 'ukr',
5246 'ur': 'urd',
5247 'uz': 'uzb',
5248 've': 'ven',
5249 'vi': 'vie',
5250 'vo': 'vol',
5251 'wa': 'wln',
5252 'wo': 'wol',
5253 'xh': 'xho',
5254 'yi': 'yid',
e9a50fba 5255 'ji': 'yid', # Replaced by yi in 1989 revision
39672624
YCH
5256 'yo': 'yor',
5257 'za': 'zha',
5258 'zh': 'zho',
5259 'zu': 'zul',
5260 }
5261
5262 @classmethod
5263 def short2long(cls, code):
5264 """Convert language code from ISO 639-1 to ISO 639-2/T"""
5265 return cls._lang_map.get(code[:2])
5266
5267 @classmethod
5268 def long2short(cls, code):
5269 """Convert language code from ISO 639-2/T to ISO 639-1"""
5270 for short_name, long_name in cls._lang_map.items():
5271 if long_name == code:
5272 return short_name
5273
5274
4eb10f66
YCH
5275class ISO3166Utils(object):
5276 # From http://data.okfn.org/data/core/country-list
5277 _country_map = {
5278 'AF': 'Afghanistan',
5279 'AX': 'Åland Islands',
5280 'AL': 'Albania',
5281 'DZ': 'Algeria',
5282 'AS': 'American Samoa',
5283 'AD': 'Andorra',
5284 'AO': 'Angola',
5285 'AI': 'Anguilla',
5286 'AQ': 'Antarctica',
5287 'AG': 'Antigua and Barbuda',
5288 'AR': 'Argentina',
5289 'AM': 'Armenia',
5290 'AW': 'Aruba',
5291 'AU': 'Australia',
5292 'AT': 'Austria',
5293 'AZ': 'Azerbaijan',
5294 'BS': 'Bahamas',
5295 'BH': 'Bahrain',
5296 'BD': 'Bangladesh',
5297 'BB': 'Barbados',
5298 'BY': 'Belarus',
5299 'BE': 'Belgium',
5300 'BZ': 'Belize',
5301 'BJ': 'Benin',
5302 'BM': 'Bermuda',
5303 'BT': 'Bhutan',
5304 'BO': 'Bolivia, Plurinational State of',
5305 'BQ': 'Bonaire, Sint Eustatius and Saba',
5306 'BA': 'Bosnia and Herzegovina',
5307 'BW': 'Botswana',
5308 'BV': 'Bouvet Island',
5309 'BR': 'Brazil',
5310 'IO': 'British Indian Ocean Territory',
5311 'BN': 'Brunei Darussalam',
5312 'BG': 'Bulgaria',
5313 'BF': 'Burkina Faso',
5314 'BI': 'Burundi',
5315 'KH': 'Cambodia',
5316 'CM': 'Cameroon',
5317 'CA': 'Canada',
5318 'CV': 'Cape Verde',
5319 'KY': 'Cayman Islands',
5320 'CF': 'Central African Republic',
5321 'TD': 'Chad',
5322 'CL': 'Chile',
5323 'CN': 'China',
5324 'CX': 'Christmas Island',
5325 'CC': 'Cocos (Keeling) Islands',
5326 'CO': 'Colombia',
5327 'KM': 'Comoros',
5328 'CG': 'Congo',
5329 'CD': 'Congo, the Democratic Republic of the',
5330 'CK': 'Cook Islands',
5331 'CR': 'Costa Rica',
5332 'CI': 'Côte d\'Ivoire',
5333 'HR': 'Croatia',
5334 'CU': 'Cuba',
5335 'CW': 'Curaçao',
5336 'CY': 'Cyprus',
5337 'CZ': 'Czech Republic',
5338 'DK': 'Denmark',
5339 'DJ': 'Djibouti',
5340 'DM': 'Dominica',
5341 'DO': 'Dominican Republic',
5342 'EC': 'Ecuador',
5343 'EG': 'Egypt',
5344 'SV': 'El Salvador',
5345 'GQ': 'Equatorial Guinea',
5346 'ER': 'Eritrea',
5347 'EE': 'Estonia',
5348 'ET': 'Ethiopia',
5349 'FK': 'Falkland Islands (Malvinas)',
5350 'FO': 'Faroe Islands',
5351 'FJ': 'Fiji',
5352 'FI': 'Finland',
5353 'FR': 'France',
5354 'GF': 'French Guiana',
5355 'PF': 'French Polynesia',
5356 'TF': 'French Southern Territories',
5357 'GA': 'Gabon',
5358 'GM': 'Gambia',
5359 'GE': 'Georgia',
5360 'DE': 'Germany',
5361 'GH': 'Ghana',
5362 'GI': 'Gibraltar',
5363 'GR': 'Greece',
5364 'GL': 'Greenland',
5365 'GD': 'Grenada',
5366 'GP': 'Guadeloupe',
5367 'GU': 'Guam',
5368 'GT': 'Guatemala',
5369 'GG': 'Guernsey',
5370 'GN': 'Guinea',
5371 'GW': 'Guinea-Bissau',
5372 'GY': 'Guyana',
5373 'HT': 'Haiti',
5374 'HM': 'Heard Island and McDonald Islands',
5375 'VA': 'Holy See (Vatican City State)',
5376 'HN': 'Honduras',
5377 'HK': 'Hong Kong',
5378 'HU': 'Hungary',
5379 'IS': 'Iceland',
5380 'IN': 'India',
5381 'ID': 'Indonesia',
5382 'IR': 'Iran, Islamic Republic of',
5383 'IQ': 'Iraq',
5384 'IE': 'Ireland',
5385 'IM': 'Isle of Man',
5386 'IL': 'Israel',
5387 'IT': 'Italy',
5388 'JM': 'Jamaica',
5389 'JP': 'Japan',
5390 'JE': 'Jersey',
5391 'JO': 'Jordan',
5392 'KZ': 'Kazakhstan',
5393 'KE': 'Kenya',
5394 'KI': 'Kiribati',
5395 'KP': 'Korea, Democratic People\'s Republic of',
5396 'KR': 'Korea, Republic of',
5397 'KW': 'Kuwait',
5398 'KG': 'Kyrgyzstan',
5399 'LA': 'Lao People\'s Democratic Republic',
5400 'LV': 'Latvia',
5401 'LB': 'Lebanon',
5402 'LS': 'Lesotho',
5403 'LR': 'Liberia',
5404 'LY': 'Libya',
5405 'LI': 'Liechtenstein',
5406 'LT': 'Lithuania',
5407 'LU': 'Luxembourg',
5408 'MO': 'Macao',
5409 'MK': 'Macedonia, the Former Yugoslav Republic of',
5410 'MG': 'Madagascar',
5411 'MW': 'Malawi',
5412 'MY': 'Malaysia',
5413 'MV': 'Maldives',
5414 'ML': 'Mali',
5415 'MT': 'Malta',
5416 'MH': 'Marshall Islands',
5417 'MQ': 'Martinique',
5418 'MR': 'Mauritania',
5419 'MU': 'Mauritius',
5420 'YT': 'Mayotte',
5421 'MX': 'Mexico',
5422 'FM': 'Micronesia, Federated States of',
5423 'MD': 'Moldova, Republic of',
5424 'MC': 'Monaco',
5425 'MN': 'Mongolia',
5426 'ME': 'Montenegro',
5427 'MS': 'Montserrat',
5428 'MA': 'Morocco',
5429 'MZ': 'Mozambique',
5430 'MM': 'Myanmar',
5431 'NA': 'Namibia',
5432 'NR': 'Nauru',
5433 'NP': 'Nepal',
5434 'NL': 'Netherlands',
5435 'NC': 'New Caledonia',
5436 'NZ': 'New Zealand',
5437 'NI': 'Nicaragua',
5438 'NE': 'Niger',
5439 'NG': 'Nigeria',
5440 'NU': 'Niue',
5441 'NF': 'Norfolk Island',
5442 'MP': 'Northern Mariana Islands',
5443 'NO': 'Norway',
5444 'OM': 'Oman',
5445 'PK': 'Pakistan',
5446 'PW': 'Palau',
5447 'PS': 'Palestine, State of',
5448 'PA': 'Panama',
5449 'PG': 'Papua New Guinea',
5450 'PY': 'Paraguay',
5451 'PE': 'Peru',
5452 'PH': 'Philippines',
5453 'PN': 'Pitcairn',
5454 'PL': 'Poland',
5455 'PT': 'Portugal',
5456 'PR': 'Puerto Rico',
5457 'QA': 'Qatar',
5458 'RE': 'Réunion',
5459 'RO': 'Romania',
5460 'RU': 'Russian Federation',
5461 'RW': 'Rwanda',
5462 'BL': 'Saint Barthélemy',
5463 'SH': 'Saint Helena, Ascension and Tristan da Cunha',
5464 'KN': 'Saint Kitts and Nevis',
5465 'LC': 'Saint Lucia',
5466 'MF': 'Saint Martin (French part)',
5467 'PM': 'Saint Pierre and Miquelon',
5468 'VC': 'Saint Vincent and the Grenadines',
5469 'WS': 'Samoa',
5470 'SM': 'San Marino',
5471 'ST': 'Sao Tome and Principe',
5472 'SA': 'Saudi Arabia',
5473 'SN': 'Senegal',
5474 'RS': 'Serbia',
5475 'SC': 'Seychelles',
5476 'SL': 'Sierra Leone',
5477 'SG': 'Singapore',
5478 'SX': 'Sint Maarten (Dutch part)',
5479 'SK': 'Slovakia',
5480 'SI': 'Slovenia',
5481 'SB': 'Solomon Islands',
5482 'SO': 'Somalia',
5483 'ZA': 'South Africa',
5484 'GS': 'South Georgia and the South Sandwich Islands',
5485 'SS': 'South Sudan',
5486 'ES': 'Spain',
5487 'LK': 'Sri Lanka',
5488 'SD': 'Sudan',
5489 'SR': 'Suriname',
5490 'SJ': 'Svalbard and Jan Mayen',
5491 'SZ': 'Swaziland',
5492 'SE': 'Sweden',
5493 'CH': 'Switzerland',
5494 'SY': 'Syrian Arab Republic',
5495 'TW': 'Taiwan, Province of China',
5496 'TJ': 'Tajikistan',
5497 'TZ': 'Tanzania, United Republic of',
5498 'TH': 'Thailand',
5499 'TL': 'Timor-Leste',
5500 'TG': 'Togo',
5501 'TK': 'Tokelau',
5502 'TO': 'Tonga',
5503 'TT': 'Trinidad and Tobago',
5504 'TN': 'Tunisia',
5505 'TR': 'Turkey',
5506 'TM': 'Turkmenistan',
5507 'TC': 'Turks and Caicos Islands',
5508 'TV': 'Tuvalu',
5509 'UG': 'Uganda',
5510 'UA': 'Ukraine',
5511 'AE': 'United Arab Emirates',
5512 'GB': 'United Kingdom',
5513 'US': 'United States',
5514 'UM': 'United States Minor Outlying Islands',
5515 'UY': 'Uruguay',
5516 'UZ': 'Uzbekistan',
5517 'VU': 'Vanuatu',
5518 'VE': 'Venezuela, Bolivarian Republic of',
5519 'VN': 'Viet Nam',
5520 'VG': 'Virgin Islands, British',
5521 'VI': 'Virgin Islands, U.S.',
5522 'WF': 'Wallis and Futuna',
5523 'EH': 'Western Sahara',
5524 'YE': 'Yemen',
5525 'ZM': 'Zambia',
5526 'ZW': 'Zimbabwe',
5527 }
5528
5529 @classmethod
5530 def short2full(cls, code):
5531 """Convert an ISO 3166-2 country code to the corresponding full name"""
5532 return cls._country_map.get(code.upper())
5533
5534
773f291d
S
5535class GeoUtils(object):
5536 # Major IPv4 address blocks per country
5537 _country_ip_map = {
53896ca5 5538 'AD': '46.172.224.0/19',
773f291d
S
5539 'AE': '94.200.0.0/13',
5540 'AF': '149.54.0.0/17',
5541 'AG': '209.59.64.0/18',
5542 'AI': '204.14.248.0/21',
5543 'AL': '46.99.0.0/16',
5544 'AM': '46.70.0.0/15',
5545 'AO': '105.168.0.0/13',
53896ca5
S
5546 'AP': '182.50.184.0/21',
5547 'AQ': '23.154.160.0/24',
773f291d
S
5548 'AR': '181.0.0.0/12',
5549 'AS': '202.70.112.0/20',
53896ca5 5550 'AT': '77.116.0.0/14',
773f291d
S
5551 'AU': '1.128.0.0/11',
5552 'AW': '181.41.0.0/18',
53896ca5
S
5553 'AX': '185.217.4.0/22',
5554 'AZ': '5.197.0.0/16',
773f291d
S
5555 'BA': '31.176.128.0/17',
5556 'BB': '65.48.128.0/17',
5557 'BD': '114.130.0.0/16',
5558 'BE': '57.0.0.0/8',
53896ca5 5559 'BF': '102.178.0.0/15',
773f291d
S
5560 'BG': '95.42.0.0/15',
5561 'BH': '37.131.0.0/17',
5562 'BI': '154.117.192.0/18',
5563 'BJ': '137.255.0.0/16',
53896ca5 5564 'BL': '185.212.72.0/23',
773f291d
S
5565 'BM': '196.12.64.0/18',
5566 'BN': '156.31.0.0/16',
5567 'BO': '161.56.0.0/16',
5568 'BQ': '161.0.80.0/20',
53896ca5 5569 'BR': '191.128.0.0/12',
773f291d
S
5570 'BS': '24.51.64.0/18',
5571 'BT': '119.2.96.0/19',
5572 'BW': '168.167.0.0/16',
5573 'BY': '178.120.0.0/13',
5574 'BZ': '179.42.192.0/18',
5575 'CA': '99.224.0.0/11',
5576 'CD': '41.243.0.0/16',
53896ca5
S
5577 'CF': '197.242.176.0/21',
5578 'CG': '160.113.0.0/16',
773f291d 5579 'CH': '85.0.0.0/13',
53896ca5 5580 'CI': '102.136.0.0/14',
773f291d
S
5581 'CK': '202.65.32.0/19',
5582 'CL': '152.172.0.0/14',
53896ca5 5583 'CM': '102.244.0.0/14',
773f291d
S
5584 'CN': '36.128.0.0/10',
5585 'CO': '181.240.0.0/12',
5586 'CR': '201.192.0.0/12',
5587 'CU': '152.206.0.0/15',
5588 'CV': '165.90.96.0/19',
5589 'CW': '190.88.128.0/17',
53896ca5 5590 'CY': '31.153.0.0/16',
773f291d
S
5591 'CZ': '88.100.0.0/14',
5592 'DE': '53.0.0.0/8',
5593 'DJ': '197.241.0.0/17',
5594 'DK': '87.48.0.0/12',
5595 'DM': '192.243.48.0/20',
5596 'DO': '152.166.0.0/15',
5597 'DZ': '41.96.0.0/12',
5598 'EC': '186.68.0.0/15',
5599 'EE': '90.190.0.0/15',
5600 'EG': '156.160.0.0/11',
5601 'ER': '196.200.96.0/20',
5602 'ES': '88.0.0.0/11',
5603 'ET': '196.188.0.0/14',
5604 'EU': '2.16.0.0/13',
5605 'FI': '91.152.0.0/13',
5606 'FJ': '144.120.0.0/16',
53896ca5 5607 'FK': '80.73.208.0/21',
773f291d
S
5608 'FM': '119.252.112.0/20',
5609 'FO': '88.85.32.0/19',
5610 'FR': '90.0.0.0/9',
5611 'GA': '41.158.0.0/15',
5612 'GB': '25.0.0.0/8',
5613 'GD': '74.122.88.0/21',
5614 'GE': '31.146.0.0/16',
5615 'GF': '161.22.64.0/18',
5616 'GG': '62.68.160.0/19',
53896ca5
S
5617 'GH': '154.160.0.0/12',
5618 'GI': '95.164.0.0/16',
773f291d
S
5619 'GL': '88.83.0.0/19',
5620 'GM': '160.182.0.0/15',
5621 'GN': '197.149.192.0/18',
5622 'GP': '104.250.0.0/19',
5623 'GQ': '105.235.224.0/20',
5624 'GR': '94.64.0.0/13',
5625 'GT': '168.234.0.0/16',
5626 'GU': '168.123.0.0/16',
5627 'GW': '197.214.80.0/20',
5628 'GY': '181.41.64.0/18',
5629 'HK': '113.252.0.0/14',
5630 'HN': '181.210.0.0/16',
5631 'HR': '93.136.0.0/13',
5632 'HT': '148.102.128.0/17',
5633 'HU': '84.0.0.0/14',
5634 'ID': '39.192.0.0/10',
5635 'IE': '87.32.0.0/12',
5636 'IL': '79.176.0.0/13',
5637 'IM': '5.62.80.0/20',
5638 'IN': '117.192.0.0/10',
5639 'IO': '203.83.48.0/21',
5640 'IQ': '37.236.0.0/14',
5641 'IR': '2.176.0.0/12',
5642 'IS': '82.221.0.0/16',
5643 'IT': '79.0.0.0/10',
5644 'JE': '87.244.64.0/18',
5645 'JM': '72.27.0.0/17',
5646 'JO': '176.29.0.0/16',
53896ca5 5647 'JP': '133.0.0.0/8',
773f291d
S
5648 'KE': '105.48.0.0/12',
5649 'KG': '158.181.128.0/17',
5650 'KH': '36.37.128.0/17',
5651 'KI': '103.25.140.0/22',
5652 'KM': '197.255.224.0/20',
53896ca5 5653 'KN': '198.167.192.0/19',
773f291d
S
5654 'KP': '175.45.176.0/22',
5655 'KR': '175.192.0.0/10',
5656 'KW': '37.36.0.0/14',
5657 'KY': '64.96.0.0/15',
5658 'KZ': '2.72.0.0/13',
5659 'LA': '115.84.64.0/18',
5660 'LB': '178.135.0.0/16',
53896ca5 5661 'LC': '24.92.144.0/20',
773f291d
S
5662 'LI': '82.117.0.0/19',
5663 'LK': '112.134.0.0/15',
53896ca5 5664 'LR': '102.183.0.0/16',
773f291d
S
5665 'LS': '129.232.0.0/17',
5666 'LT': '78.56.0.0/13',
5667 'LU': '188.42.0.0/16',
5668 'LV': '46.109.0.0/16',
5669 'LY': '41.252.0.0/14',
5670 'MA': '105.128.0.0/11',
5671 'MC': '88.209.64.0/18',
5672 'MD': '37.246.0.0/16',
5673 'ME': '178.175.0.0/17',
5674 'MF': '74.112.232.0/21',
5675 'MG': '154.126.0.0/17',
5676 'MH': '117.103.88.0/21',
5677 'MK': '77.28.0.0/15',
5678 'ML': '154.118.128.0/18',
5679 'MM': '37.111.0.0/17',
5680 'MN': '49.0.128.0/17',
5681 'MO': '60.246.0.0/16',
5682 'MP': '202.88.64.0/20',
5683 'MQ': '109.203.224.0/19',
5684 'MR': '41.188.64.0/18',
5685 'MS': '208.90.112.0/22',
5686 'MT': '46.11.0.0/16',
5687 'MU': '105.16.0.0/12',
5688 'MV': '27.114.128.0/18',
53896ca5 5689 'MW': '102.70.0.0/15',
773f291d
S
5690 'MX': '187.192.0.0/11',
5691 'MY': '175.136.0.0/13',
5692 'MZ': '197.218.0.0/15',
5693 'NA': '41.182.0.0/16',
5694 'NC': '101.101.0.0/18',
5695 'NE': '197.214.0.0/18',
5696 'NF': '203.17.240.0/22',
5697 'NG': '105.112.0.0/12',
5698 'NI': '186.76.0.0/15',
5699 'NL': '145.96.0.0/11',
5700 'NO': '84.208.0.0/13',
5701 'NP': '36.252.0.0/15',
5702 'NR': '203.98.224.0/19',
5703 'NU': '49.156.48.0/22',
5704 'NZ': '49.224.0.0/14',
5705 'OM': '5.36.0.0/15',
5706 'PA': '186.72.0.0/15',
5707 'PE': '186.160.0.0/14',
5708 'PF': '123.50.64.0/18',
5709 'PG': '124.240.192.0/19',
5710 'PH': '49.144.0.0/13',
5711 'PK': '39.32.0.0/11',
5712 'PL': '83.0.0.0/11',
5713 'PM': '70.36.0.0/20',
5714 'PR': '66.50.0.0/16',
5715 'PS': '188.161.0.0/16',
5716 'PT': '85.240.0.0/13',
5717 'PW': '202.124.224.0/20',
5718 'PY': '181.120.0.0/14',
5719 'QA': '37.210.0.0/15',
53896ca5 5720 'RE': '102.35.0.0/16',
773f291d 5721 'RO': '79.112.0.0/13',
53896ca5 5722 'RS': '93.86.0.0/15',
773f291d 5723 'RU': '5.136.0.0/13',
53896ca5 5724 'RW': '41.186.0.0/16',
773f291d
S
5725 'SA': '188.48.0.0/13',
5726 'SB': '202.1.160.0/19',
5727 'SC': '154.192.0.0/11',
53896ca5 5728 'SD': '102.120.0.0/13',
773f291d 5729 'SE': '78.64.0.0/12',
53896ca5 5730 'SG': '8.128.0.0/10',
773f291d
S
5731 'SI': '188.196.0.0/14',
5732 'SK': '78.98.0.0/15',
53896ca5 5733 'SL': '102.143.0.0/17',
773f291d
S
5734 'SM': '89.186.32.0/19',
5735 'SN': '41.82.0.0/15',
53896ca5 5736 'SO': '154.115.192.0/18',
773f291d
S
5737 'SR': '186.179.128.0/17',
5738 'SS': '105.235.208.0/21',
5739 'ST': '197.159.160.0/19',
5740 'SV': '168.243.0.0/16',
5741 'SX': '190.102.0.0/20',
5742 'SY': '5.0.0.0/16',
5743 'SZ': '41.84.224.0/19',
5744 'TC': '65.255.48.0/20',
5745 'TD': '154.68.128.0/19',
5746 'TG': '196.168.0.0/14',
5747 'TH': '171.96.0.0/13',
5748 'TJ': '85.9.128.0/18',
5749 'TK': '27.96.24.0/21',
5750 'TL': '180.189.160.0/20',
5751 'TM': '95.85.96.0/19',
5752 'TN': '197.0.0.0/11',
5753 'TO': '175.176.144.0/21',
5754 'TR': '78.160.0.0/11',
5755 'TT': '186.44.0.0/15',
5756 'TV': '202.2.96.0/19',
5757 'TW': '120.96.0.0/11',
5758 'TZ': '156.156.0.0/14',
53896ca5
S
5759 'UA': '37.52.0.0/14',
5760 'UG': '102.80.0.0/13',
5761 'US': '6.0.0.0/8',
773f291d 5762 'UY': '167.56.0.0/13',
53896ca5 5763 'UZ': '84.54.64.0/18',
773f291d 5764 'VA': '212.77.0.0/19',
53896ca5 5765 'VC': '207.191.240.0/21',
773f291d 5766 'VE': '186.88.0.0/13',
53896ca5 5767 'VG': '66.81.192.0/20',
773f291d
S
5768 'VI': '146.226.0.0/16',
5769 'VN': '14.160.0.0/11',
5770 'VU': '202.80.32.0/20',
5771 'WF': '117.20.32.0/21',
5772 'WS': '202.4.32.0/19',
5773 'YE': '134.35.0.0/16',
5774 'YT': '41.242.116.0/22',
5775 'ZA': '41.0.0.0/11',
53896ca5
S
5776 'ZM': '102.144.0.0/13',
5777 'ZW': '102.177.192.0/18',
773f291d
S
5778 }
5779
5780 @classmethod
5f95927a
S
5781 def random_ipv4(cls, code_or_block):
5782 if len(code_or_block) == 2:
5783 block = cls._country_ip_map.get(code_or_block.upper())
5784 if not block:
5785 return None
5786 else:
5787 block = code_or_block
773f291d
S
5788 addr, preflen = block.split('/')
5789 addr_min = compat_struct_unpack('!L', socket.inet_aton(addr))[0]
5790 addr_max = addr_min | (0xffffffff >> int(preflen))
18a0defa 5791 return compat_str(socket.inet_ntoa(
4248dad9 5792 compat_struct_pack('!L', random.randint(addr_min, addr_max))))
773f291d
S
5793
5794
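# Illustrative sketch (not part of the original source): random_ipv4() accepts
# either a two-letter country code looked up in _country_ip_map above, or a
# CIDR block used directly. For '5.62.80.0/20' the host mask is
# 0xffffffff >> 20 == 0x0fff, so the returned address falls between
# 5.62.80.0 and 5.62.95.255.
#
# >>> GeoUtils.random_ipv4('IM')                # 'GeoUtils' assumed to be the enclosing class, as in yt-dlp
# >>> GeoUtils.random_ipv4('203.0.113.0/24')    # any address inside the given block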
91410c9b 5795class PerRequestProxyHandler(compat_urllib_request.ProxyHandler):
2461f79d
PH
5796 def __init__(self, proxies=None):
5797 # Set default handlers
5798 for type in ('http', 'https'):
5799 setattr(self, '%s_open' % type,
5800 lambda r, proxy='__noproxy__', type=type, meth=self.proxy_open:
5801 meth(r, proxy, type))
38e87f6c 5802 compat_urllib_request.ProxyHandler.__init__(self, proxies)
2461f79d 5803
91410c9b 5804 def proxy_open(self, req, proxy, type):
2461f79d 5805 req_proxy = req.headers.get('Ytdl-request-proxy')
91410c9b
PH
5806 if req_proxy is not None:
5807 proxy = req_proxy
2461f79d
PH
5808 del req.headers['Ytdl-request-proxy']
5809
5810 if proxy == '__noproxy__':
5811 return None # No Proxy
51fb4995 5812 if compat_urlparse.urlparse(proxy).scheme.lower() in ('socks', 'socks4', 'socks4a', 'socks5'):
71aff188 5813 req.add_header('Ytdl-socks-proxy', proxy)
7a5c1cfe 5814 # yt-dlp's http/https handlers take care of wrapping the socket with SOCKS
71aff188 5815 return None
91410c9b
PH
5816 return compat_urllib_request.ProxyHandler.proxy_open(
5817 self, req, proxy, type)
5bc880b9
YCH
5818
5819
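# Hedged usage sketch (illustrative only; the proxy URL is a placeholder):
# the default proxy map can be overridden per request via the
# 'Ytdl-request-proxy' header, or disabled with the '__noproxy__' sentinel.
#
# >>> handler = PerRequestProxyHandler({'http': 'http://proxy.example:3128'})
# >>> opener = compat_urllib_request.build_opener(handler)
# >>> req = compat_urllib_request.Request('http://example.com/')
# >>> req.add_header('Ytdl-request-proxy', '__noproxy__')  # bypass the default proxy
# >>> # opener.open(req) would now connect directly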
0a5445dd
YCH
5820# Both long_to_bytes and bytes_to_long are adapted from PyCrypto, which is
5821# released into the public domain
5822# https://github.com/dlitz/pycrypto/blob/master/lib/Crypto/Util/number.py#L387
5823
5824def long_to_bytes(n, blocksize=0):
5825 """long_to_bytes(n:long, blocksize:int) : string
5826 Convert a long integer to a byte string.
5827
5828 If optional blocksize is given and greater than zero, pad the front of the
5829 byte string with binary zeros so that the length is a multiple of
5830 blocksize.
5831 """
5832 # after much testing, this algorithm was deemed to be the fastest
5833 s = b''
5834 n = int(n)
5835 while n > 0:
5836 s = compat_struct_pack('>I', n & 0xffffffff) + s
5837 n = n >> 32
5838 # strip off leading zeros
5839 for i in range(len(s)):
5840 if s[i] != b'\000'[0]:
5841 break
5842 else:
5843 # only happens when n == 0
5844 s = b'\000'
5845 i = 0
5846 s = s[i:]
5847 # add back some pad bytes. this could be done more efficiently w.r.t. the
5848 # de-padding being done above, but sigh...
5849 if blocksize > 0 and len(s) % blocksize:
5850 s = (blocksize - len(s) % blocksize) * b'\000' + s
5851 return s
5852
5853
5854def bytes_to_long(s):
5855 """bytes_to_long(string) : long
5856 Convert a byte string to a long integer.
5857
5858 This is (essentially) the inverse of long_to_bytes().
5859 """
5860 acc = 0
5861 length = len(s)
5862 if length % 4:
5863 extra = (4 - length % 4)
5864 s = b'\000' * extra + s
5865 length = length + extra
5866 for i in range(0, length, 4):
5867 acc = (acc << 32) + compat_struct_unpack('>I', s[i:i + 4])[0]
5868 return acc
5869
5870
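# Quick round-trip illustration of the two helpers above (hand-checked,
# not part of the original source):
#
# >>> long_to_bytes(65537)
# b'\x01\x00\x01'
# >>> long_to_bytes(65537, blocksize=4)
# b'\x00\x01\x00\x01'
# >>> bytes_to_long(b'\x01\x00\x01')
# 65537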
5bc880b9
YCH
5871def ohdave_rsa_encrypt(data, exponent, modulus):
5872 '''
5873 Implement OHDave's RSA algorithm. See http://www.ohdave.com/rsa/
5874
5875 Input:
5876 data: data to encrypt, bytes-like object
5877 exponent, modulus: parameter e and N of RSA algorithm, both integer
5878 Output: hex string of encrypted data
5879
5880 Limitation: supports one block encryption only
5881 '''
5882
5883 payload = int(binascii.hexlify(data[::-1]), 16)
5884 encrypted = pow(payload, exponent, modulus)
5885 return '%x' % encrypted
81bdc8fd
YCH
5886
5887
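# Toy illustration only (a real RSA modulus is far larger). Note that the
# input bytes are interpreted little-endian because of the data[::-1] above:
#
# >>> ohdave_rsa_encrypt(b'\x02', 3, 15)   # pow(2, 3, 15) == 8
# '8'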
f48409c7
YCH
5888def pkcs1pad(data, length):
5889 """
5890 Padding input data with PKCS#1 scheme
5891
5892 @param {int[]} data input data
5893 @param {int} length target length
5894 @returns {int[]} padded data
5895 """
5896 if len(data) > length - 11:
5897 raise ValueError('Input data too long for PKCS#1 padding')
5898
5899 pseudo_random = [random.randint(0, 254) for _ in range(length - len(data) - 3)]
5900 return [0, 2] + pseudo_random + [0] + data
5901
5902
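# Shape of the padded block (illustrative; the middle bytes are random):
# [0, 2, <length - len(data) - 3 random bytes>, 0, *data]
#
# >>> padded = pkcs1pad([0x41, 0x42, 0x43], 16)
# >>> padded[:2], padded[-4], padded[-3:], len(padded)
# ([0, 2], 0, [65, 66, 67], 16)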
5eb6bdce 5903def encode_base_n(num, n, table=None):
59f898b7 5904 FULL_TABLE = '0123456789abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ'
59f898b7
YCH
5905 if not table:
5906 table = FULL_TABLE[:n]
5907
5eb6bdce
YCH
5908 if n > len(table):
5909 raise ValueError('base %d exceeds table length %d' % (n, len(table)))
5910
5911 if num == 0:
5912 return table[0]
5913
81bdc8fd
YCH
5914 ret = ''
5915 while num:
5916 ret = table[num % n] + ret
5917 num = num // n
5918 return ret
f52354a8
YCH
5919
5920
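# A few hand-checked examples of the base-n encoder above (illustrative only):
#
# >>> encode_base_n(255, 16)
# 'ff'
# >>> encode_base_n(62, 62)    # default digits run 0-9, a-z, A-Z
# '10'
# >>> encode_base_n(0, 2)
# '0'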
5921def decode_packed_codes(code):
06b3fe29 5922 mobj = re.search(PACKED_CODES_RE, code)
a0566bbf 5923 obfuscated_code, base, count, symbols = mobj.groups()
f52354a8
YCH
5924 base = int(base)
5925 count = int(count)
5926 symbols = symbols.split('|')
5927 symbol_table = {}
5928
5929 while count:
5930 count -= 1
5eb6bdce 5931 base_n_count = encode_base_n(count, base)
f52354a8
YCH
5932 symbol_table[base_n_count] = symbols[count] or base_n_count
5933
5934 return re.sub(
5935 r'\b(\w+)\b', lambda mobj: symbol_table[mobj.group(0)],
a0566bbf 5936 obfuscated_code)
e154c651 5937
5938
1ced2221
S
5939def caesar(s, alphabet, shift):
5940 if shift == 0:
5941 return s
5942 l = len(alphabet)
5943 return ''.join(
5944 alphabet[(alphabet.index(c) + shift) % l] if c in alphabet else c
5945 for c in s)
5946
5947
5948def rot47(s):
5949 return caesar(s, r'''!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~''', 47)
5950
5951
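# Illustrative examples (not part of the original source): characters outside
# the given alphabet pass through unchanged, and ROT47 is its own inverse
# because shifting by 47 twice is a full 94-character rotation.
#
# >>> caesar('ab-c', 'abcdefghijklmnopqrstuvwxyz', 2)
# 'cd-e'
# >>> rot47(rot47('applying ROT47 twice is a no-op'))
# 'applying ROT47 twice is a no-op'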
e154c651 5952def parse_m3u8_attributes(attrib):
5953 info = {}
5954 for (key, val) in re.findall(r'(?P<key>[A-Z0-9-]+)=(?P<val>"[^"]+"|[^",]+)(?:,|$)', attrib):
5955 if val.startswith('"'):
5956 val = val[1:-1]
5957 info[key] = val
5958 return info
1143535d
YCH
5959
5960
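# Example of the attribute parser above on a typical #EXT-X-STREAM-INF line
# (illustrative only; quoted values keep their embedded commas):
#
# >>> parse_m3u8_attributes('BANDWIDTH=1280000,CODECS="avc1.4d401e,mp4a.40.2",RESOLUTION=640x360')
# {'BANDWIDTH': '1280000', 'CODECS': 'avc1.4d401e,mp4a.40.2', 'RESOLUTION': '640x360'}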
5961def urshift(val, n):
5962 return val >> n if val >= 0 else (val + 0x100000000) >> n
d3f8e038
YCH
5963
5964
5965# Based on png2str() written by @gdkchan and improved by @yokrysty
067aa17e 5966# Originally posted at https://github.com/ytdl-org/youtube-dl/issues/9706
d3f8e038
YCH
5967def decode_png(png_data):
5968 # Reference: https://www.w3.org/TR/PNG/
5969 header = png_data[8:]
5970
5971 if png_data[:8] != b'\x89PNG\x0d\x0a\x1a\x0a' or header[4:8] != b'IHDR':
5972 raise IOError('Not a valid PNG file.')
5973
5974 int_map = {1: '>B', 2: '>H', 4: '>I'}
5975 unpack_integer = lambda x: compat_struct_unpack(int_map[len(x)], x)[0]
5976
5977 chunks = []
5978
5979 while header:
5980 length = unpack_integer(header[:4])
5981 header = header[4:]
5982
5983 chunk_type = header[:4]
5984 header = header[4:]
5985
5986 chunk_data = header[:length]
5987 header = header[length:]
5988
5989 header = header[4:] # Skip CRC
5990
5991 chunks.append({
5992 'type': chunk_type,
5993 'length': length,
5994 'data': chunk_data
5995 })
5996
5997 ihdr = chunks[0]['data']
5998
5999 width = unpack_integer(ihdr[:4])
6000 height = unpack_integer(ihdr[4:8])
6001
6002 idat = b''
6003
6004 for chunk in chunks:
6005 if chunk['type'] == b'IDAT':
6006 idat += chunk['data']
6007
6008 if not idat:
6009 raise IOError('Unable to read PNG data.')
6010
6011 decompressed_data = bytearray(zlib.decompress(idat))
6012
6013 stride = width * 3
6014 pixels = []
6015
6016 def _get_pixel(idx):
6017 x = idx % stride
6018 y = idx // stride
6019 return pixels[y][x]
6020
6021 for y in range(height):
6022 basePos = y * (1 + stride)
6023 filter_type = decompressed_data[basePos]
6024
6025 current_row = []
6026
6027 pixels.append(current_row)
6028
6029 for x in range(stride):
6030 color = decompressed_data[1 + basePos + x]
6031 basex = y * stride + x
6032 left = 0
6033 up = 0
6034
6035 if x > 2:
6036 left = _get_pixel(basex - 3)
6037 if y > 0:
6038 up = _get_pixel(basex - stride)
6039
6040 if filter_type == 1: # Sub
6041 color = (color + left) & 0xff
6042 elif filter_type == 2: # Up
6043 color = (color + up) & 0xff
6044 elif filter_type == 3: # Average
6045 color = (color + ((left + up) >> 1)) & 0xff
6046 elif filter_type == 4: # Paeth
6047 a = left
6048 b = up
6049 c = 0
6050
6051 if x > 2 and y > 0:
6052 c = _get_pixel(basex - stride - 3)
6053
6054 p = a + b - c
6055
6056 pa = abs(p - a)
6057 pb = abs(p - b)
6058 pc = abs(p - c)
6059
6060 if pa <= pb and pa <= pc:
6061 color = (color + a) & 0xff
6062 elif pb <= pc:
6063 color = (color + b) & 0xff
6064 else:
6065 color = (color + c) & 0xff
6066
6067 current_row.append(color)
6068
6069 return width, height, pixels
efa97bdc
YCH
6070
6071
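# Hedged usage sketch ('frame.png' is a placeholder path): the decoder above
# assumes 8-bit RGB data (3 bytes per pixel) and returns each row as a flat
# list of byte values rather than (r, g, b) tuples.
#
# >>> with open('frame.png', 'rb') as f:
# ...     width, height, pixels = decode_png(f.read())
# >>> r, g, b = pixels[0][:3]    # first pixel of the top row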
6072def write_xattr(path, key, value):
6073 # This mess below finds the best xattr tool for the job
6074 try:
6075 # try the pyxattr module...
6076 import xattr
6077
53a7e3d2
YCH
6078 if hasattr(xattr, 'set'): # pyxattr
6079 # Unicode arguments are not supported in python-pyxattr until
6080 # version 0.5.0
067aa17e 6081 # See https://github.com/ytdl-org/youtube-dl/issues/5498
53a7e3d2
YCH
6082 pyxattr_required_version = '0.5.0'
6083 if version_tuple(xattr.__version__) < version_tuple(pyxattr_required_version):
6084 # TODO: fallback to CLI tools
6085 raise XAttrUnavailableError(
6086 'python-pyxattr is detected but is too old. '
7a5c1cfe 6087 'yt-dlp requires %s or above while your version is %s. '
53a7e3d2
YCH
6088 'Falling back to other xattr implementations' % (
6089 pyxattr_required_version, xattr.__version__))
6090
6091 setxattr = xattr.set
6092 else: # xattr
6093 setxattr = xattr.setxattr
efa97bdc
YCH
6094
6095 try:
53a7e3d2 6096 setxattr(path, key, value)
efa97bdc
YCH
6097 except EnvironmentError as e:
6098 raise XAttrMetadataError(e.errno, e.strerror)
6099
6100 except ImportError:
6101 if compat_os_name == 'nt':
6102 # Write xattrs to NTFS Alternate Data Streams:
6103 # http://en.wikipedia.org/wiki/NTFS#Alternate_data_streams_.28ADS.29
6104 assert ':' not in key
6105 assert os.path.exists(path)
6106
6107 ads_fn = path + ':' + key
6108 try:
6109 with open(ads_fn, 'wb') as f:
6110 f.write(value)
6111 except EnvironmentError as e:
6112 raise XAttrMetadataError(e.errno, e.strerror)
6113 else:
6114 user_has_setfattr = check_executable('setfattr', ['--version'])
6115 user_has_xattr = check_executable('xattr', ['-h'])
6116
6117 if user_has_setfattr or user_has_xattr:
6118
6119 value = value.decode('utf-8')
6120 if user_has_setfattr:
6121 executable = 'setfattr'
6122 opts = ['-n', key, '-v', value]
6123 elif user_has_xattr:
6124 executable = 'xattr'
6125 opts = ['-w', key, value]
6126
3089bc74
S
6127 cmd = ([encodeFilename(executable, True)]
6128 + [encodeArgument(o) for o in opts]
6129 + [encodeFilename(path, True)])
efa97bdc
YCH
6130
6131 try:
6132 p = subprocess.Popen(
6133 cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE, stdin=subprocess.PIPE)
6134 except EnvironmentError as e:
6135 raise XAttrMetadataError(e.errno, e.strerror)
f5b1bca9 6136 stdout, stderr = process_communicate_or_kill(p)
efa97bdc
YCH
6137 stderr = stderr.decode('utf-8', 'replace')
6138 if p.returncode != 0:
6139 raise XAttrMetadataError(p.returncode, stderr)
6140
6141 else:
6142 # On Unix, but we can't find pyxattr, setfattr, or xattr.
6143 if sys.platform.startswith('linux'):
6144 raise XAttrUnavailableError(
6145 "Couldn't find a tool to set the xattrs. "
6146 "Install either the python 'pyxattr' or 'xattr' "
6147 "modules, or the GNU 'attr' package "
6148 "(which contains the 'setfattr' tool).")
6149 else:
6150 raise XAttrUnavailableError(
6151 "Couldn't find a tool to set the xattrs. "
6152 "Install either the python 'xattr' module, "
6153 "or the 'xattr' binary.")
0c265486
YCH
6154
6155
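# Hedged usage sketch (the path and attribute name are placeholders; the
# value must be bytes). On Linux, user-writable attributes live in the
# 'user.' namespace; on Windows the call falls back to an NTFS alternate
# data stream.
#
# >>> write_xattr('video.mp4', 'user.xdg.referrer.url', b'https://example.com/')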
6156def random_birthday(year_field, month_field, day_field):
aa374bc7
AS
6157 start_date = datetime.date(1950, 1, 1)
6158 end_date = datetime.date(1995, 12, 31)
6159 offset = random.randint(0, (end_date - start_date).days)
6160 random_date = start_date + datetime.timedelta(offset)
0c265486 6161 return {
aa374bc7
AS
6162 year_field: str(random_date.year),
6163 month_field: str(random_date.month),
6164 day_field: str(random_date.day),
0c265486 6165 }
732044af 6166
c76eb41b 6167
732044af 6168# Templates for internet shortcut files, which are plain text files.
6169DOT_URL_LINK_TEMPLATE = '''
6170[InternetShortcut]
6171URL=%(url)s
6172'''.lstrip()
6173
6174DOT_WEBLOC_LINK_TEMPLATE = '''
6175<?xml version="1.0" encoding="UTF-8"?>
6176<!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
6177<plist version="1.0">
6178<dict>
6179\t<key>URL</key>
6180\t<string>%(url)s</string>
6181</dict>
6182</plist>
6183'''.lstrip()
6184
6185DOT_DESKTOP_LINK_TEMPLATE = '''
6186[Desktop Entry]
6187Encoding=UTF-8
6188Name=%(filename)s
6189Type=Link
6190URL=%(url)s
6191Icon=text-html
6192'''.lstrip()
6193
6194
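# Hedged example of filling one of the templates above ('example.url' and the
# URL are placeholders); .webloc and .desktop files are written the same way:
#
# >>> with io.open('example.url', 'w', encoding='utf-8') as f:
# ...     f.write(DOT_URL_LINK_TEMPLATE % {'url': 'https://example.com/watch?v=xyz'})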
6195def iri_to_uri(iri):
6196 """
6197 Converts an IRI (Internationalized Resource Identifier, allowing Unicode characters) to a URI (Uniform Resource Identifier, ASCII-only).
6198
6199 The function doesn't add an additional layer of escaping; e.g., it doesn't escape `%3C` as `%253C`. Instead, it percent-escapes (using UTF-8) only those characters that are not already escaped, leaving existing escape sequences intact.
6200 """
6201
6202 iri_parts = compat_urllib_parse_urlparse(iri)
6203
6204 if '[' in iri_parts.netloc:
6205 raise ValueError('IPv6 URIs are not yet supported.')
6206 # Querying `.netloc`, when there's only one bracket, also raises a ValueError.
6207
6208 # The `safe` argument values, that the following code uses, contain the characters that should not be percent-encoded. Everything else but letters, digits and '_.-' will be percent-encoded with an underlying UTF-8 encoding. Everything already percent-encoded will be left as is.
6209
6210 net_location = ''
6211 if iri_parts.username:
6212 net_location += compat_urllib_parse_quote(iri_parts.username, safe=r"!$%&'()*+,~")
6213 if iri_parts.password is not None:
6214 net_location += ':' + compat_urllib_parse_quote(iri_parts.password, safe=r"!$%&'()*+,~")
6215 net_location += '@'
6216
6217 net_location += iri_parts.hostname.encode('idna').decode('utf-8') # Punycode for Unicode hostnames.
6218 # The 'idna' encoding produces ASCII text.
6219 if iri_parts.port is not None and iri_parts.port != 80:
6220 net_location += ':' + str(iri_parts.port)
6221
6222 return compat_urllib_parse_urlunparse(
6223 (iri_parts.scheme,
6224 net_location,
6225
6226 compat_urllib_parse_quote_plus(iri_parts.path, safe=r"!$%&'()*+,/:;=@|~"),
6227
6228 # Unsure about the `safe` argument, since this is a legacy way of handling parameters.
6229 compat_urllib_parse_quote_plus(iri_parts.params, safe=r"!$%&'()*+,/:;=@|~"),
6230
6231 # Not totally sure about the `safe` argument, since the source does not explicitly mention the query URI component.
6232 compat_urllib_parse_quote_plus(iri_parts.query, safe=r"!$%&'()*+,/:;=?@{|}~"),
6233
6234 compat_urllib_parse_quote_plus(iri_parts.fragment, safe=r"!#$%&'()*+,/:;=?@{|}~")))
6235
6236 # Source for `safe` arguments: https://url.spec.whatwg.org/#percent-encoded-bytes.
6237
6238
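# Illustrative behaviour of iri_to_uri() (not part of the original source):
# an already-ASCII URI passes through unchanged, while a non-ASCII hostname
# is punycoded via the 'idna' codec and other non-ASCII components are UTF-8
# percent-encoded.
#
# >>> iri_to_uri('http://www.example.com/path/to/file')
# 'http://www.example.com/path/to/file'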
6239def to_high_limit_path(path):
6240 if sys.platform in ['win32', 'cygwin']:
6241 # Work around MAX_PATH limitation on Windows. The maximum allowed length for the individual path segments may still be quite limited.
6242 return r'\\?\ '.rstrip() + os.path.abspath(path)  # trailing space + rstrip() works around raw strings not being able to end with a backslash
6243
6244 return path
76d321f6 6245
c76eb41b 6246
b868936c 6247def format_field(obj, field=None, template='%s', ignore=(None, ''), default='', func=None):
6248 if field is None:
6249 val = obj if obj is not None else default
6250 else:
6251 val = obj.get(field, default)
76d321f6 6252 if func and val not in ignore:
6253 val = func(val)
6254 return template % val if val not in ignore else default
00dd0cd5 6255
6256
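# Hand-checked examples of format_field() (illustrative only):
#
# >>> format_field({'height': 1080}, 'height', template='%dp')
# '1080p'
# >>> format_field({'height': 1080}, 'width', default='unknown')
# 'unknown'
# >>> format_field(None, template='%s', default='none')
# 'none'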
6257def clean_podcast_url(url):
6258 return re.sub(r'''(?x)
6259 (?:
6260 (?:
6261 chtbl\.com/track|
6262 media\.blubrry\.com| # https://create.blubrry.com/resources/podcast-media-download-statistics/getting-started/
6263 play\.podtrac\.com
6264 )/[^/]+|
6265 (?:dts|www)\.podtrac\.com/(?:pts/)?redirect\.[0-9a-z]{3,4}| # http://analytics.podtrac.com/how-to-measure
6266 flex\.acast\.com|
6267 pd(?:
6268 cn\.co| # https://podcorn.com/analytics-prefix/
6269 st\.fm # https://podsights.com/docs/
6270 )/e
6271 )/''', '', url)
ffcb8191
THD
6272
6273
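# Example of the prefix stripping above (hand-checked against the regex;
# the URL itself is illustrative):
#
# >>> clean_podcast_url('https://www.podtrac.com/pts/redirect.mp3/traffic.megaphone.fm/XYZ123.mp3')
# 'https://traffic.megaphone.fm/XYZ123.mp3'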
6274_HEX_TABLE = '0123456789abcdef'
6275
6276
6277def random_uuidv4():
6278 return re.sub(r'[xy]', lambda x: _HEX_TABLE[random.randint(0, 15)], 'xxxxxxxx-xxxx-4xxx-yxxx-xxxxxxxxxxxx')
0202b52a 6279
6280
6281def make_dir(path, to_screen=None):
6282 try:
6283 dn = os.path.dirname(path)
6284 if dn and not os.path.exists(dn):
6285 os.makedirs(dn)
6286 return True
6287 except (OSError, IOError) as err:
6288 if callable(to_screen):
6289 to_screen('unable to create directory ' + error_to_compat_str(err))
6290 return False
f74980cb 6291
6292
6293def get_executable_path():
c552ae88 6294 from zipimport import zipimporter
6295 if hasattr(sys, 'frozen'): # Running from PyInstaller
6296 path = os.path.dirname(sys.executable)
6297 elif isinstance(globals().get('__loader__'), zipimporter): # Running from ZIP
6298 path = os.path.join(os.path.dirname(__file__), '../..')
6299 else:
6300 path = os.path.join(os.path.dirname(__file__), '..')
f74980cb 6301 return os.path.abspath(path)
6302
6303
2f567473 6304def load_plugins(name, suffix, namespace):
f74980cb 6305 plugin_info = [None]
3ae5e797 6306 classes = {}
f74980cb 6307 try:
6308 plugin_info = imp.find_module(
6309 name, [os.path.join(get_executable_path(), 'ytdlp_plugins')])
6310 plugins = imp.load_module(name, *plugin_info)
6311 for name in dir(plugins):
2f567473 6312 if name in namespace:
6313 continue
6314 if not name.endswith(suffix):
f74980cb 6315 continue
6316 klass = getattr(plugins, name)
3ae5e797 6317 classes[name] = namespace[name] = klass
f74980cb 6318 except ImportError:
6319 pass
6320 finally:
6321 if plugin_info[0] is not None:
6322 plugin_info[0].close()
6323 return classes
06167fbb 6324
6325
325ebc17 6326def traverse_obj(
352d63fd 6327 obj, *path_list, default=None, expected_type=None, get_all=True,
325ebc17 6328 casesense=True, is_user_input=False, traverse_string=False):
324ad820 6329 ''' Traverse nested list/dict/tuple
8f334380 6330 @param path_list A list of paths which are checked one by one.
6331 Each path is a list of keys where each key is a string,
6332 a tuple of strings or "...". When a tuple is given,
6333 all the keys given in the tuple are traversed, and
6334 "..." traverses all the keys in the object
325ebc17 6335 @param default Default value to return
352d63fd 6336 @param expected_type Only accept final value of this type (Can also be any callable)
6337 @param get_all Return all the values obtained from a path or only the first one
324ad820 6338 @param casesense Whether to consider dictionary keys as case sensitive
6339 @param is_user_input Whether the keys are generated from user input. If True,
6340 strings are converted to int/slice if necessary
6341 @param traverse_string Whether to traverse inside strings. If True, any
6342 non-compatible object will also be converted into a string
8f334380 6343 # TODO: Write tests
324ad820 6344 '''
325ebc17 6345 if not casesense:
dbf5416a 6346 _lower = lambda k: (k.lower() if isinstance(k, str) else k)
8f334380 6347 path_list = (map(_lower, variadic(path)) for path in path_list)
6348
6349 def _traverse_obj(obj, path, _current_depth=0):
6350 nonlocal depth
575e17a1 6351 if obj is None:
6352 return None
8f334380 6353 path = tuple(variadic(path))
6354 for i, key in enumerate(path):
6355 if isinstance(key, (list, tuple)):
6356 obj = [_traverse_obj(obj, sub_key, _current_depth) for sub_key in key]
6357 key = ...
6358 if key is ...:
6359 obj = (obj.values() if isinstance(obj, dict)
6360 else obj if isinstance(obj, (list, tuple, LazyList))
6361 else str(obj) if traverse_string else [])
6362 _current_depth += 1
6363 depth = max(depth, _current_depth)
6364 return [_traverse_obj(inner_obj, path[i + 1:], _current_depth) for inner_obj in obj]
575e17a1 6365 elif isinstance(obj, dict) and not (is_user_input and key == ':'):
325ebc17 6366 obj = (obj.get(key) if casesense or (key in obj)
6367 else next((v for k, v in obj.items() if _lower(k) == key), None))
6368 else:
6369 if is_user_input:
6370 key = (int_or_none(key) if ':' not in key
6371 else slice(*map(int_or_none, key.split(':'))))
8f334380 6372 if key == slice(None):
575e17a1 6373 return _traverse_obj(obj, (..., *path[i + 1:]), _current_depth)
325ebc17 6374 if not isinstance(key, (int, slice)):
9fea350f 6375 return None
8f334380 6376 if not isinstance(obj, (list, tuple, LazyList)):
325ebc17 6377 if not traverse_string:
6378 return None
6379 obj = str(obj)
6380 try:
6381 obj = obj[key]
6382 except IndexError:
324ad820 6383 return None
325ebc17 6384 return obj
6385
352d63fd 6386 if isinstance(expected_type, type):
6387 type_test = lambda val: val if isinstance(val, expected_type) else None
6388 elif expected_type is not None:
6389 type_test = expected_type
6390 else:
6391 type_test = lambda val: val
6392
8f334380 6393 for path in path_list:
6394 depth = 0
6395 val = _traverse_obj(obj, path)
325ebc17 6396 if val is not None:
8f334380 6397 if depth:
6398 for _ in range(depth - 1):
6586bca9 6399 val = itertools.chain.from_iterable(v for v in val if v is not None)
352d63fd 6400 val = [v for v in map(type_test, val) if v is not None]
8f334380 6401 if val:
352d63fd 6402 return val if get_all else val[0]
6403 else:
6404 val = type_test(val)
6405 if val is not None:
8f334380 6406 return val
325ebc17 6407 return default
324ad820 6408
6409
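# Hand-checked examples of traverse_obj() (illustrative only): a path is a
# sequence of keys/indices, "..." fans out over every element, and 'default'
# is returned when no path yields a value.
#
# >>> d = {'formats': [{'url': 'https://example.com/a'}, {'height': 720}]}
# >>> traverse_obj(d, ('formats', 0, 'url'))
# 'https://example.com/a'
# >>> traverse_obj(d, ('formats', ..., 'height'))
# [720]
# >>> traverse_obj(d, ('formats', 1, 'url'), default='missing')
# 'missing'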
6410def traverse_dict(dictn, keys, casesense=True):
6411 ''' For backward compatibility. Do not use '''
6412 return traverse_obj(dictn, keys, casesense=casesense,
6413 is_user_input=True, traverse_string=True)
6606817a 6414
6415
c634ad2a 6416def variadic(x, allowed_types=(str, bytes)):
cb89cfc1 6417 return x if isinstance(x, collections.abc.Iterable) and not isinstance(x, allowed_types) else (x,)
bd50a52b
THD
6418
6419
49fa4d9a
N
6420# create a JSON Web Signature (jws) with HS256 algorithm
6421# the resulting format is in JWS Compact Serialization
6422# implemented following JWT https://www.rfc-editor.org/rfc/rfc7519.html
6423# implemented following JWS https://www.rfc-editor.org/rfc/rfc7515.html
6424def jwt_encode_hs256(payload_data, key, headers={}):
6425 header_data = {
6426 'alg': 'HS256',
6427 'typ': 'JWT',
6428 }
6429 if headers:
6430 header_data.update(headers)
6431 header_b64 = base64.b64encode(json.dumps(header_data).encode('utf-8'))
6432 payload_b64 = base64.b64encode(json.dumps(payload_data).encode('utf-8'))
6433 h = hmac.new(key.encode('utf-8'), header_b64 + b'.' + payload_b64, hashlib.sha256)
6434 signature_b64 = base64.b64encode(h.digest())
6435 token = header_b64 + b'.' + payload_b64 + b'.' + signature_b64
6436 return token
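# Hedged usage sketch (the key and claims below are placeholders). Note that
# the helper uses standard base64 with '=' padding rather than the unpadded
# base64url that RFC 7515 prescribes, so strict JWT validators may reject the
# result:
#
# >>> token = jwt_encode_hs256({'iss': 'yt-dlp', 'exp': 1893456000}, 'secret-key')
# >>> token.count(b'.')    # header.payload.signature
# 2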