#!/usr/bin/env python3
# coding: utf-8

from __future__ import unicode_literals

import base64
import binascii
import calendar
import codecs
import collections
import contextlib
import ctypes
import datetime
import email.utils
import email.header
import errno
import functools
import gzip
import hashlib
import hmac
import importlib.util
import io
import itertools
import json
import locale
import math
import operator
import os
import platform
import random
import re
import socket
import ssl
import subprocess
import sys
import tempfile
import time
import traceback
import xml.etree.ElementTree
import zlib

from .compat import (
    compat_HTMLParseError,
    compat_HTMLParser,
    compat_HTTPError,
    compat_basestring,
    compat_chr,
    compat_cookiejar,
    compat_ctypes_WINFUNCTYPE,
    compat_etree_fromstring,
    compat_expanduser,
    compat_html_entities,
    compat_html_entities_html5,
    compat_http_client,
    compat_integer_types,
    compat_numeric_types,
    compat_kwargs,
    compat_os_name,
    compat_parse_qs,
    compat_shlex_quote,
    compat_str,
    compat_struct_pack,
    compat_struct_unpack,
    compat_urllib_error,
    compat_urllib_parse,
    compat_urllib_parse_urlencode,
    compat_urllib_parse_urlparse,
    compat_urllib_parse_urlunparse,
    compat_urllib_parse_quote,
    compat_urllib_parse_quote_plus,
    compat_urllib_parse_unquote_plus,
    compat_urllib_request,
    compat_urlparse,
    compat_xpath,
)

from .socks import (
    ProxyType,
    sockssocket,
)


def register_socks_protocols():
    # "Register" SOCKS protocols
    # In Python < 2.6.5, urlsplit() suffers from bug https://bugs.python.org/issue7904
    # URLs with protocols not in urlparse.uses_netloc are not handled correctly
    for scheme in ('socks', 'socks4', 'socks4a', 'socks5'):
        if scheme not in compat_urlparse.uses_netloc:
            compat_urlparse.uses_netloc.append(scheme)


# This is not clearly defined otherwise
compiled_regex_type = type(re.compile(''))


def random_user_agent():
    _USER_AGENT_TPL = 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/%s Safari/537.36'
    _CHROME_VERSIONS = (
99 '74.0.3729.129',
100 '76.0.3780.3',
101 '76.0.3780.2',
102 '74.0.3729.128',
103 '76.0.3780.1',
104 '76.0.3780.0',
105 '75.0.3770.15',
106 '74.0.3729.127',
107 '74.0.3729.126',
108 '76.0.3779.1',
109 '76.0.3779.0',
110 '75.0.3770.14',
111 '74.0.3729.125',
112 '76.0.3778.1',
113 '76.0.3778.0',
114 '75.0.3770.13',
115 '74.0.3729.124',
116 '74.0.3729.123',
117 '73.0.3683.121',
118 '76.0.3777.1',
119 '76.0.3777.0',
120 '75.0.3770.12',
121 '74.0.3729.122',
122 '76.0.3776.4',
123 '75.0.3770.11',
124 '74.0.3729.121',
125 '76.0.3776.3',
126 '76.0.3776.2',
127 '73.0.3683.120',
128 '74.0.3729.120',
129 '74.0.3729.119',
130 '74.0.3729.118',
131 '76.0.3776.1',
132 '76.0.3776.0',
133 '76.0.3775.5',
134 '75.0.3770.10',
135 '74.0.3729.117',
136 '76.0.3775.4',
137 '76.0.3775.3',
138 '74.0.3729.116',
139 '75.0.3770.9',
140 '76.0.3775.2',
141 '76.0.3775.1',
142 '76.0.3775.0',
143 '75.0.3770.8',
144 '74.0.3729.115',
145 '74.0.3729.114',
146 '76.0.3774.1',
147 '76.0.3774.0',
148 '75.0.3770.7',
149 '74.0.3729.113',
150 '74.0.3729.112',
151 '74.0.3729.111',
152 '76.0.3773.1',
153 '76.0.3773.0',
154 '75.0.3770.6',
155 '74.0.3729.110',
156 '74.0.3729.109',
157 '76.0.3772.1',
158 '76.0.3772.0',
159 '75.0.3770.5',
160 '74.0.3729.108',
161 '74.0.3729.107',
162 '76.0.3771.1',
163 '76.0.3771.0',
164 '75.0.3770.4',
165 '74.0.3729.106',
166 '74.0.3729.105',
167 '75.0.3770.3',
168 '74.0.3729.104',
169 '74.0.3729.103',
170 '74.0.3729.102',
171 '75.0.3770.2',
172 '74.0.3729.101',
173 '75.0.3770.1',
174 '75.0.3770.0',
175 '74.0.3729.100',
176 '75.0.3769.5',
177 '75.0.3769.4',
178 '74.0.3729.99',
179 '75.0.3769.3',
180 '75.0.3769.2',
181 '75.0.3768.6',
182 '74.0.3729.98',
183 '75.0.3769.1',
184 '75.0.3769.0',
185 '74.0.3729.97',
186 '73.0.3683.119',
187 '73.0.3683.118',
188 '74.0.3729.96',
189 '75.0.3768.5',
190 '75.0.3768.4',
191 '75.0.3768.3',
192 '75.0.3768.2',
193 '74.0.3729.95',
194 '74.0.3729.94',
195 '75.0.3768.1',
196 '75.0.3768.0',
197 '74.0.3729.93',
198 '74.0.3729.92',
199 '73.0.3683.117',
200 '74.0.3729.91',
201 '75.0.3766.3',
202 '74.0.3729.90',
203 '75.0.3767.2',
204 '75.0.3767.1',
205 '75.0.3767.0',
206 '74.0.3729.89',
207 '73.0.3683.116',
208 '75.0.3766.2',
209 '74.0.3729.88',
210 '75.0.3766.1',
211 '75.0.3766.0',
212 '74.0.3729.87',
213 '73.0.3683.115',
214 '74.0.3729.86',
215 '75.0.3765.1',
216 '75.0.3765.0',
217 '74.0.3729.85',
218 '73.0.3683.114',
219 '74.0.3729.84',
220 '75.0.3764.1',
221 '75.0.3764.0',
222 '74.0.3729.83',
223 '73.0.3683.113',
224 '75.0.3763.2',
225 '75.0.3761.4',
226 '74.0.3729.82',
227 '75.0.3763.1',
228 '75.0.3763.0',
229 '74.0.3729.81',
230 '73.0.3683.112',
231 '75.0.3762.1',
232 '75.0.3762.0',
233 '74.0.3729.80',
234 '75.0.3761.3',
235 '74.0.3729.79',
236 '73.0.3683.111',
237 '75.0.3761.2',
238 '74.0.3729.78',
239 '74.0.3729.77',
240 '75.0.3761.1',
241 '75.0.3761.0',
242 '73.0.3683.110',
243 '74.0.3729.76',
244 '74.0.3729.75',
245 '75.0.3760.0',
246 '74.0.3729.74',
247 '75.0.3759.8',
248 '75.0.3759.7',
249 '75.0.3759.6',
250 '74.0.3729.73',
251 '75.0.3759.5',
252 '74.0.3729.72',
253 '73.0.3683.109',
254 '75.0.3759.4',
255 '75.0.3759.3',
256 '74.0.3729.71',
257 '75.0.3759.2',
258 '74.0.3729.70',
259 '73.0.3683.108',
260 '74.0.3729.69',
261 '75.0.3759.1',
262 '75.0.3759.0',
263 '74.0.3729.68',
264 '73.0.3683.107',
265 '74.0.3729.67',
266 '75.0.3758.1',
267 '75.0.3758.0',
268 '74.0.3729.66',
269 '73.0.3683.106',
270 '74.0.3729.65',
271 '75.0.3757.1',
272 '75.0.3757.0',
273 '74.0.3729.64',
274 '73.0.3683.105',
275 '74.0.3729.63',
276 '75.0.3756.1',
277 '75.0.3756.0',
278 '74.0.3729.62',
279 '73.0.3683.104',
280 '75.0.3755.3',
281 '75.0.3755.2',
282 '73.0.3683.103',
283 '75.0.3755.1',
284 '75.0.3755.0',
285 '74.0.3729.61',
286 '73.0.3683.102',
287 '74.0.3729.60',
288 '75.0.3754.2',
289 '74.0.3729.59',
290 '75.0.3753.4',
291 '74.0.3729.58',
292 '75.0.3754.1',
293 '75.0.3754.0',
294 '74.0.3729.57',
295 '73.0.3683.101',
296 '75.0.3753.3',
297 '75.0.3752.2',
298 '75.0.3753.2',
299 '74.0.3729.56',
300 '75.0.3753.1',
301 '75.0.3753.0',
302 '74.0.3729.55',
303 '73.0.3683.100',
304 '74.0.3729.54',
305 '75.0.3752.1',
306 '75.0.3752.0',
307 '74.0.3729.53',
308 '73.0.3683.99',
309 '74.0.3729.52',
310 '75.0.3751.1',
311 '75.0.3751.0',
312 '74.0.3729.51',
313 '73.0.3683.98',
314 '74.0.3729.50',
315 '75.0.3750.0',
316 '74.0.3729.49',
317 '74.0.3729.48',
318 '74.0.3729.47',
319 '75.0.3749.3',
320 '74.0.3729.46',
321 '73.0.3683.97',
322 '75.0.3749.2',
323 '74.0.3729.45',
324 '75.0.3749.1',
325 '75.0.3749.0',
326 '74.0.3729.44',
327 '73.0.3683.96',
328 '74.0.3729.43',
329 '74.0.3729.42',
330 '75.0.3748.1',
331 '75.0.3748.0',
332 '74.0.3729.41',
333 '75.0.3747.1',
334 '73.0.3683.95',
335 '75.0.3746.4',
336 '74.0.3729.40',
337 '74.0.3729.39',
338 '75.0.3747.0',
339 '75.0.3746.3',
340 '75.0.3746.2',
341 '74.0.3729.38',
342 '75.0.3746.1',
343 '75.0.3746.0',
344 '74.0.3729.37',
345 '73.0.3683.94',
346 '75.0.3745.5',
347 '75.0.3745.4',
348 '75.0.3745.3',
349 '75.0.3745.2',
350 '74.0.3729.36',
351 '75.0.3745.1',
352 '75.0.3745.0',
353 '75.0.3744.2',
354 '74.0.3729.35',
355 '73.0.3683.93',
356 '74.0.3729.34',
357 '75.0.3744.1',
358 '75.0.3744.0',
359 '74.0.3729.33',
360 '73.0.3683.92',
361 '74.0.3729.32',
362 '74.0.3729.31',
363 '73.0.3683.91',
364 '75.0.3741.2',
365 '75.0.3740.5',
366 '74.0.3729.30',
367 '75.0.3741.1',
368 '75.0.3741.0',
369 '74.0.3729.29',
370 '75.0.3740.4',
371 '73.0.3683.90',
372 '74.0.3729.28',
373 '75.0.3740.3',
374 '73.0.3683.89',
375 '75.0.3740.2',
376 '74.0.3729.27',
377 '75.0.3740.1',
378 '75.0.3740.0',
379 '74.0.3729.26',
380 '73.0.3683.88',
381 '73.0.3683.87',
382 '74.0.3729.25',
383 '75.0.3739.1',
384 '75.0.3739.0',
385 '73.0.3683.86',
386 '74.0.3729.24',
387 '73.0.3683.85',
388 '75.0.3738.4',
389 '75.0.3738.3',
390 '75.0.3738.2',
391 '75.0.3738.1',
392 '75.0.3738.0',
393 '74.0.3729.23',
394 '73.0.3683.84',
395 '74.0.3729.22',
396 '74.0.3729.21',
397 '75.0.3737.1',
398 '75.0.3737.0',
399 '74.0.3729.20',
400 '73.0.3683.83',
401 '74.0.3729.19',
402 '75.0.3736.1',
403 '75.0.3736.0',
404 '74.0.3729.18',
405 '73.0.3683.82',
406 '74.0.3729.17',
407 '75.0.3735.1',
408 '75.0.3735.0',
409 '74.0.3729.16',
410 '73.0.3683.81',
411 '75.0.3734.1',
412 '75.0.3734.0',
413 '74.0.3729.15',
414 '73.0.3683.80',
415 '74.0.3729.14',
416 '75.0.3733.1',
417 '75.0.3733.0',
418 '75.0.3732.1',
419 '74.0.3729.13',
420 '74.0.3729.12',
421 '73.0.3683.79',
422 '74.0.3729.11',
423 '75.0.3732.0',
424 '74.0.3729.10',
425 '73.0.3683.78',
426 '74.0.3729.9',
427 '74.0.3729.8',
428 '74.0.3729.7',
429 '75.0.3731.3',
430 '75.0.3731.2',
431 '75.0.3731.0',
432 '74.0.3729.6',
433 '73.0.3683.77',
434 '73.0.3683.76',
435 '75.0.3730.5',
436 '75.0.3730.4',
437 '73.0.3683.75',
438 '74.0.3729.5',
439 '73.0.3683.74',
440 '75.0.3730.3',
441 '75.0.3730.2',
442 '74.0.3729.4',
443 '73.0.3683.73',
444 '73.0.3683.72',
445 '75.0.3730.1',
446 '75.0.3730.0',
447 '74.0.3729.3',
448 '73.0.3683.71',
449 '74.0.3729.2',
450 '73.0.3683.70',
451 '74.0.3729.1',
452 '74.0.3729.0',
453 '74.0.3726.4',
454 '73.0.3683.69',
455 '74.0.3726.3',
456 '74.0.3728.0',
457 '74.0.3726.2',
458 '73.0.3683.68',
459 '74.0.3726.1',
460 '74.0.3726.0',
461 '74.0.3725.4',
462 '73.0.3683.67',
463 '73.0.3683.66',
464 '74.0.3725.3',
465 '74.0.3725.2',
466 '74.0.3725.1',
467 '74.0.3724.8',
468 '74.0.3725.0',
469 '73.0.3683.65',
470 '74.0.3724.7',
471 '74.0.3724.6',
472 '74.0.3724.5',
473 '74.0.3724.4',
474 '74.0.3724.3',
475 '74.0.3724.2',
476 '74.0.3724.1',
477 '74.0.3724.0',
478 '73.0.3683.64',
479 '74.0.3723.1',
480 '74.0.3723.0',
481 '73.0.3683.63',
482 '74.0.3722.1',
483 '74.0.3722.0',
484 '73.0.3683.62',
485 '74.0.3718.9',
486 '74.0.3702.3',
487 '74.0.3721.3',
488 '74.0.3721.2',
489 '74.0.3721.1',
490 '74.0.3721.0',
491 '74.0.3720.6',
492 '73.0.3683.61',
493 '72.0.3626.122',
494 '73.0.3683.60',
495 '74.0.3720.5',
496 '72.0.3626.121',
497 '74.0.3718.8',
498 '74.0.3720.4',
499 '74.0.3720.3',
500 '74.0.3718.7',
501 '74.0.3720.2',
502 '74.0.3720.1',
503 '74.0.3720.0',
504 '74.0.3718.6',
505 '74.0.3719.5',
506 '73.0.3683.59',
507 '74.0.3718.5',
508 '74.0.3718.4',
509 '74.0.3719.4',
510 '74.0.3719.3',
511 '74.0.3719.2',
512 '74.0.3719.1',
513 '73.0.3683.58',
514 '74.0.3719.0',
515 '73.0.3683.57',
516 '73.0.3683.56',
517 '74.0.3718.3',
518 '73.0.3683.55',
519 '74.0.3718.2',
520 '74.0.3718.1',
521 '74.0.3718.0',
522 '73.0.3683.54',
523 '74.0.3717.2',
524 '73.0.3683.53',
525 '74.0.3717.1',
526 '74.0.3717.0',
527 '73.0.3683.52',
528 '74.0.3716.1',
529 '74.0.3716.0',
530 '73.0.3683.51',
531 '74.0.3715.1',
532 '74.0.3715.0',
533 '73.0.3683.50',
534 '74.0.3711.2',
535 '74.0.3714.2',
536 '74.0.3713.3',
537 '74.0.3714.1',
538 '74.0.3714.0',
539 '73.0.3683.49',
540 '74.0.3713.1',
541 '74.0.3713.0',
542 '72.0.3626.120',
543 '73.0.3683.48',
544 '74.0.3712.2',
545 '74.0.3712.1',
546 '74.0.3712.0',
547 '73.0.3683.47',
548 '72.0.3626.119',
549 '73.0.3683.46',
550 '74.0.3710.2',
551 '72.0.3626.118',
552 '74.0.3711.1',
553 '74.0.3711.0',
554 '73.0.3683.45',
555 '72.0.3626.117',
556 '74.0.3710.1',
557 '74.0.3710.0',
558 '73.0.3683.44',
559 '72.0.3626.116',
560 '74.0.3709.1',
561 '74.0.3709.0',
562 '74.0.3704.9',
563 '73.0.3683.43',
564 '72.0.3626.115',
565 '74.0.3704.8',
566 '74.0.3704.7',
567 '74.0.3708.0',
568 '74.0.3706.7',
569 '74.0.3704.6',
570 '73.0.3683.42',
571 '72.0.3626.114',
572 '74.0.3706.6',
573 '72.0.3626.113',
574 '74.0.3704.5',
575 '74.0.3706.5',
576 '74.0.3706.4',
577 '74.0.3706.3',
578 '74.0.3706.2',
579 '74.0.3706.1',
580 '74.0.3706.0',
581 '73.0.3683.41',
582 '72.0.3626.112',
583 '74.0.3705.1',
584 '74.0.3705.0',
585 '73.0.3683.40',
586 '72.0.3626.111',
587 '73.0.3683.39',
588 '74.0.3704.4',
589 '73.0.3683.38',
590 '74.0.3704.3',
591 '74.0.3704.2',
592 '74.0.3704.1',
593 '74.0.3704.0',
594 '73.0.3683.37',
595 '72.0.3626.110',
596 '72.0.3626.109',
597 '74.0.3703.3',
598 '74.0.3703.2',
599 '73.0.3683.36',
600 '74.0.3703.1',
601 '74.0.3703.0',
602 '73.0.3683.35',
603 '72.0.3626.108',
604 '74.0.3702.2',
605 '74.0.3699.3',
606 '74.0.3702.1',
607 '74.0.3702.0',
608 '73.0.3683.34',
609 '72.0.3626.107',
610 '73.0.3683.33',
611 '74.0.3701.1',
612 '74.0.3701.0',
613 '73.0.3683.32',
614 '73.0.3683.31',
615 '72.0.3626.105',
616 '74.0.3700.1',
617 '74.0.3700.0',
618 '73.0.3683.29',
619 '72.0.3626.103',
620 '74.0.3699.2',
621 '74.0.3699.1',
622 '74.0.3699.0',
623 '73.0.3683.28',
624 '72.0.3626.102',
625 '73.0.3683.27',
626 '73.0.3683.26',
627 '74.0.3698.0',
628 '74.0.3696.2',
629 '72.0.3626.101',
630 '73.0.3683.25',
631 '74.0.3696.1',
632 '74.0.3696.0',
633 '74.0.3694.8',
634 '72.0.3626.100',
635 '74.0.3694.7',
636 '74.0.3694.6',
637 '74.0.3694.5',
638 '74.0.3694.4',
639 '72.0.3626.99',
640 '72.0.3626.98',
641 '74.0.3694.3',
642 '73.0.3683.24',
643 '72.0.3626.97',
644 '72.0.3626.96',
645 '72.0.3626.95',
646 '73.0.3683.23',
647 '72.0.3626.94',
648 '73.0.3683.22',
649 '73.0.3683.21',
650 '72.0.3626.93',
651 '74.0.3694.2',
652 '72.0.3626.92',
653 '74.0.3694.1',
654 '74.0.3694.0',
655 '74.0.3693.6',
656 '73.0.3683.20',
657 '72.0.3626.91',
658 '74.0.3693.5',
659 '74.0.3693.4',
660 '74.0.3693.3',
661 '74.0.3693.2',
662 '73.0.3683.19',
663 '74.0.3693.1',
664 '74.0.3693.0',
665 '73.0.3683.18',
666 '72.0.3626.90',
667 '74.0.3692.1',
668 '74.0.3692.0',
669 '73.0.3683.17',
670 '72.0.3626.89',
671 '74.0.3687.3',
672 '74.0.3691.1',
673 '74.0.3691.0',
674 '73.0.3683.16',
675 '72.0.3626.88',
676 '72.0.3626.87',
677 '73.0.3683.15',
678 '74.0.3690.1',
679 '74.0.3690.0',
680 '73.0.3683.14',
681 '72.0.3626.86',
682 '73.0.3683.13',
683 '73.0.3683.12',
684 '74.0.3689.1',
685 '74.0.3689.0',
686 '73.0.3683.11',
687 '72.0.3626.85',
688 '73.0.3683.10',
689 '72.0.3626.84',
690 '73.0.3683.9',
691 '74.0.3688.1',
692 '74.0.3688.0',
693 '73.0.3683.8',
694 '72.0.3626.83',
695 '74.0.3687.2',
696 '74.0.3687.1',
697 '74.0.3687.0',
698 '73.0.3683.7',
699 '72.0.3626.82',
700 '74.0.3686.4',
701 '72.0.3626.81',
702 '74.0.3686.3',
703 '74.0.3686.2',
704 '74.0.3686.1',
705 '74.0.3686.0',
706 '73.0.3683.6',
707 '72.0.3626.80',
708 '74.0.3685.1',
709 '74.0.3685.0',
710 '73.0.3683.5',
711 '72.0.3626.79',
712 '74.0.3684.1',
713 '74.0.3684.0',
714 '73.0.3683.4',
715 '72.0.3626.78',
716 '72.0.3626.77',
717 '73.0.3683.3',
718 '73.0.3683.2',
719 '72.0.3626.76',
720 '73.0.3683.1',
721 '73.0.3683.0',
722 '72.0.3626.75',
723 '71.0.3578.141',
724 '73.0.3682.1',
725 '73.0.3682.0',
726 '72.0.3626.74',
727 '71.0.3578.140',
728 '73.0.3681.4',
729 '73.0.3681.3',
730 '73.0.3681.2',
731 '73.0.3681.1',
732 '73.0.3681.0',
733 '72.0.3626.73',
734 '71.0.3578.139',
735 '72.0.3626.72',
736 '72.0.3626.71',
737 '73.0.3680.1',
738 '73.0.3680.0',
739 '72.0.3626.70',
740 '71.0.3578.138',
741 '73.0.3678.2',
742 '73.0.3679.1',
743 '73.0.3679.0',
744 '72.0.3626.69',
745 '71.0.3578.137',
746 '73.0.3678.1',
747 '73.0.3678.0',
748 '71.0.3578.136',
749 '73.0.3677.1',
750 '73.0.3677.0',
751 '72.0.3626.68',
752 '72.0.3626.67',
753 '71.0.3578.135',
754 '73.0.3676.1',
755 '73.0.3676.0',
756 '73.0.3674.2',
757 '72.0.3626.66',
758 '71.0.3578.134',
759 '73.0.3674.1',
760 '73.0.3674.0',
761 '72.0.3626.65',
762 '71.0.3578.133',
763 '73.0.3673.2',
764 '73.0.3673.1',
765 '73.0.3673.0',
766 '72.0.3626.64',
767 '71.0.3578.132',
768 '72.0.3626.63',
769 '72.0.3626.62',
770 '72.0.3626.61',
771 '72.0.3626.60',
772 '73.0.3672.1',
773 '73.0.3672.0',
774 '72.0.3626.59',
775 '71.0.3578.131',
776 '73.0.3671.3',
777 '73.0.3671.2',
778 '73.0.3671.1',
779 '73.0.3671.0',
780 '72.0.3626.58',
781 '71.0.3578.130',
782 '73.0.3670.1',
783 '73.0.3670.0',
784 '72.0.3626.57',
785 '71.0.3578.129',
786 '73.0.3669.1',
787 '73.0.3669.0',
788 '72.0.3626.56',
789 '71.0.3578.128',
790 '73.0.3668.2',
791 '73.0.3668.1',
792 '73.0.3668.0',
793 '72.0.3626.55',
794 '71.0.3578.127',
795 '73.0.3667.2',
796 '73.0.3667.1',
797 '73.0.3667.0',
798 '72.0.3626.54',
799 '71.0.3578.126',
800 '73.0.3666.1',
801 '73.0.3666.0',
802 '72.0.3626.53',
803 '71.0.3578.125',
804 '73.0.3665.4',
805 '73.0.3665.3',
806 '72.0.3626.52',
807 '73.0.3665.2',
808 '73.0.3664.4',
809 '73.0.3665.1',
810 '73.0.3665.0',
811 '72.0.3626.51',
812 '71.0.3578.124',
813 '72.0.3626.50',
814 '73.0.3664.3',
815 '73.0.3664.2',
816 '73.0.3664.1',
817 '73.0.3664.0',
818 '73.0.3663.2',
819 '72.0.3626.49',
820 '71.0.3578.123',
821 '73.0.3663.1',
822 '73.0.3663.0',
823 '72.0.3626.48',
824 '71.0.3578.122',
825 '73.0.3662.1',
826 '73.0.3662.0',
827 '72.0.3626.47',
828 '71.0.3578.121',
829 '73.0.3661.1',
830 '72.0.3626.46',
831 '73.0.3661.0',
832 '72.0.3626.45',
833 '71.0.3578.120',
834 '73.0.3660.2',
835 '73.0.3660.1',
836 '73.0.3660.0',
837 '72.0.3626.44',
838 '71.0.3578.119',
839 '73.0.3659.1',
840 '73.0.3659.0',
841 '72.0.3626.43',
842 '71.0.3578.118',
843 '73.0.3658.1',
844 '73.0.3658.0',
845 '72.0.3626.42',
846 '71.0.3578.117',
847 '73.0.3657.1',
848 '73.0.3657.0',
849 '72.0.3626.41',
850 '71.0.3578.116',
851 '73.0.3656.1',
852 '73.0.3656.0',
853 '72.0.3626.40',
854 '71.0.3578.115',
855 '73.0.3655.1',
856 '73.0.3655.0',
857 '72.0.3626.39',
858 '71.0.3578.114',
859 '73.0.3654.1',
860 '73.0.3654.0',
861 '72.0.3626.38',
862 '71.0.3578.113',
863 '73.0.3653.1',
864 '73.0.3653.0',
865 '72.0.3626.37',
866 '71.0.3578.112',
867 '73.0.3652.1',
868 '73.0.3652.0',
869 '72.0.3626.36',
870 '71.0.3578.111',
871 '73.0.3651.1',
872 '73.0.3651.0',
873 '72.0.3626.35',
874 '71.0.3578.110',
875 '73.0.3650.1',
876 '73.0.3650.0',
877 '72.0.3626.34',
878 '71.0.3578.109',
879 '73.0.3649.1',
880 '73.0.3649.0',
881 '72.0.3626.33',
882 '71.0.3578.108',
883 '73.0.3648.2',
884 '73.0.3648.1',
885 '73.0.3648.0',
886 '72.0.3626.32',
887 '71.0.3578.107',
888 '73.0.3647.2',
889 '73.0.3647.1',
890 '73.0.3647.0',
891 '72.0.3626.31',
892 '71.0.3578.106',
893 '73.0.3635.3',
894 '73.0.3646.2',
895 '73.0.3646.1',
896 '73.0.3646.0',
897 '72.0.3626.30',
898 '71.0.3578.105',
899 '72.0.3626.29',
900 '73.0.3645.2',
901 '73.0.3645.1',
902 '73.0.3645.0',
903 '72.0.3626.28',
904 '71.0.3578.104',
905 '72.0.3626.27',
906 '72.0.3626.26',
907 '72.0.3626.25',
908 '72.0.3626.24',
909 '73.0.3644.0',
910 '73.0.3643.2',
911 '72.0.3626.23',
912 '71.0.3578.103',
913 '73.0.3643.1',
914 '73.0.3643.0',
915 '72.0.3626.22',
916 '71.0.3578.102',
917 '73.0.3642.1',
918 '73.0.3642.0',
919 '72.0.3626.21',
920 '71.0.3578.101',
921 '73.0.3641.1',
922 '73.0.3641.0',
923 '72.0.3626.20',
924 '71.0.3578.100',
925 '72.0.3626.19',
926 '73.0.3640.1',
927 '73.0.3640.0',
928 '72.0.3626.18',
929 '73.0.3639.1',
930 '71.0.3578.99',
931 '73.0.3639.0',
932 '72.0.3626.17',
933 '73.0.3638.2',
934 '72.0.3626.16',
935 '73.0.3638.1',
936 '73.0.3638.0',
937 '72.0.3626.15',
938 '71.0.3578.98',
939 '73.0.3635.2',
940 '71.0.3578.97',
941 '73.0.3637.1',
942 '73.0.3637.0',
943 '72.0.3626.14',
944 '71.0.3578.96',
945 '71.0.3578.95',
946 '72.0.3626.13',
947 '71.0.3578.94',
948 '73.0.3636.2',
949 '71.0.3578.93',
950 '73.0.3636.1',
951 '73.0.3636.0',
952 '72.0.3626.12',
953 '71.0.3578.92',
954 '73.0.3635.1',
955 '73.0.3635.0',
956 '72.0.3626.11',
957 '71.0.3578.91',
958 '73.0.3634.2',
959 '73.0.3634.1',
960 '73.0.3634.0',
961 '72.0.3626.10',
962 '71.0.3578.90',
963 '71.0.3578.89',
964 '73.0.3633.2',
965 '73.0.3633.1',
966 '73.0.3633.0',
967 '72.0.3610.4',
968 '72.0.3626.9',
969 '71.0.3578.88',
970 '73.0.3632.5',
971 '73.0.3632.4',
972 '73.0.3632.3',
973 '73.0.3632.2',
974 '73.0.3632.1',
975 '73.0.3632.0',
976 '72.0.3626.8',
977 '71.0.3578.87',
978 '73.0.3631.2',
979 '73.0.3631.1',
980 '73.0.3631.0',
981 '72.0.3626.7',
982 '71.0.3578.86',
983 '72.0.3626.6',
984 '73.0.3630.1',
985 '73.0.3630.0',
986 '72.0.3626.5',
987 '71.0.3578.85',
988 '72.0.3626.4',
989 '73.0.3628.3',
990 '73.0.3628.2',
991 '73.0.3629.1',
992 '73.0.3629.0',
993 '72.0.3626.3',
994 '71.0.3578.84',
995 '73.0.3628.1',
996 '73.0.3628.0',
997 '71.0.3578.83',
998 '73.0.3627.1',
999 '73.0.3627.0',
1000 '72.0.3626.2',
1001 '71.0.3578.82',
1002 '71.0.3578.81',
1003 '71.0.3578.80',
1004 '72.0.3626.1',
1005 '72.0.3626.0',
1006 '71.0.3578.79',
1007 '70.0.3538.124',
1008 '71.0.3578.78',
1009 '72.0.3623.4',
1010 '72.0.3625.2',
1011 '72.0.3625.1',
1012 '72.0.3625.0',
1013 '71.0.3578.77',
1014 '70.0.3538.123',
1015 '72.0.3624.4',
1016 '72.0.3624.3',
1017 '72.0.3624.2',
1018 '71.0.3578.76',
1019 '72.0.3624.1',
1020 '72.0.3624.0',
1021 '72.0.3623.3',
1022 '71.0.3578.75',
1023 '70.0.3538.122',
1024 '71.0.3578.74',
1025 '72.0.3623.2',
1026 '72.0.3610.3',
1027 '72.0.3623.1',
1028 '72.0.3623.0',
1029 '72.0.3622.3',
1030 '72.0.3622.2',
1031 '71.0.3578.73',
1032 '70.0.3538.121',
1033 '72.0.3622.1',
1034 '72.0.3622.0',
1035 '71.0.3578.72',
1036 '70.0.3538.120',
1037 '72.0.3621.1',
1038 '72.0.3621.0',
1039 '71.0.3578.71',
1040 '70.0.3538.119',
1041 '72.0.3620.1',
1042 '72.0.3620.0',
1043 '71.0.3578.70',
1044 '70.0.3538.118',
1045 '71.0.3578.69',
1046 '72.0.3619.1',
1047 '72.0.3619.0',
1048 '71.0.3578.68',
1049 '70.0.3538.117',
1050 '71.0.3578.67',
1051 '72.0.3618.1',
1052 '72.0.3618.0',
1053 '71.0.3578.66',
1054 '70.0.3538.116',
1055 '72.0.3617.1',
1056 '72.0.3617.0',
1057 '71.0.3578.65',
1058 '70.0.3538.115',
1059 '72.0.3602.3',
1060 '71.0.3578.64',
1061 '72.0.3616.1',
1062 '72.0.3616.0',
1063 '71.0.3578.63',
1064 '70.0.3538.114',
1065 '71.0.3578.62',
1066 '72.0.3615.1',
1067 '72.0.3615.0',
1068 '71.0.3578.61',
1069 '70.0.3538.113',
1070 '72.0.3614.1',
1071 '72.0.3614.0',
1072 '71.0.3578.60',
1073 '70.0.3538.112',
1074 '72.0.3613.1',
1075 '72.0.3613.0',
1076 '71.0.3578.59',
1077 '70.0.3538.111',
1078 '72.0.3612.2',
1079 '72.0.3612.1',
1080 '72.0.3612.0',
1081 '70.0.3538.110',
1082 '71.0.3578.58',
1083 '70.0.3538.109',
1084 '72.0.3611.2',
1085 '72.0.3611.1',
1086 '72.0.3611.0',
1087 '71.0.3578.57',
1088 '70.0.3538.108',
1089 '72.0.3610.2',
1090 '71.0.3578.56',
1091 '71.0.3578.55',
1092 '72.0.3610.1',
1093 '72.0.3610.0',
1094 '71.0.3578.54',
1095 '70.0.3538.107',
1096 '71.0.3578.53',
1097 '72.0.3609.3',
1098 '71.0.3578.52',
1099 '72.0.3609.2',
1100 '71.0.3578.51',
1101 '72.0.3608.5',
1102 '72.0.3609.1',
1103 '72.0.3609.0',
1104 '71.0.3578.50',
1105 '70.0.3538.106',
1106 '72.0.3608.4',
1107 '72.0.3608.3',
1108 '72.0.3608.2',
1109 '71.0.3578.49',
1110 '72.0.3608.1',
1111 '72.0.3608.0',
1112 '70.0.3538.105',
1113 '71.0.3578.48',
1114 '72.0.3607.1',
1115 '72.0.3607.0',
1116 '71.0.3578.47',
1117 '70.0.3538.104',
1118 '72.0.3606.2',
1119 '72.0.3606.1',
1120 '72.0.3606.0',
1121 '71.0.3578.46',
1122 '70.0.3538.103',
1123 '70.0.3538.102',
1124 '72.0.3605.3',
1125 '72.0.3605.2',
1126 '72.0.3605.1',
1127 '72.0.3605.0',
1128 '71.0.3578.45',
1129 '70.0.3538.101',
1130 '71.0.3578.44',
1131 '71.0.3578.43',
1132 '70.0.3538.100',
1133 '70.0.3538.99',
1134 '71.0.3578.42',
1135 '72.0.3604.1',
1136 '72.0.3604.0',
1137 '71.0.3578.41',
1138 '70.0.3538.98',
1139 '71.0.3578.40',
1140 '72.0.3603.2',
1141 '72.0.3603.1',
1142 '72.0.3603.0',
1143 '71.0.3578.39',
1144 '70.0.3538.97',
1145 '72.0.3602.2',
1146 '71.0.3578.38',
1147 '71.0.3578.37',
1148 '72.0.3602.1',
1149 '72.0.3602.0',
1150 '71.0.3578.36',
1151 '70.0.3538.96',
1152 '72.0.3601.1',
1153 '72.0.3601.0',
1154 '71.0.3578.35',
1155 '70.0.3538.95',
1156 '72.0.3600.1',
1157 '72.0.3600.0',
1158 '71.0.3578.34',
1159 '70.0.3538.94',
1160 '72.0.3599.3',
1161 '72.0.3599.2',
1162 '72.0.3599.1',
1163 '72.0.3599.0',
1164 '71.0.3578.33',
1165 '70.0.3538.93',
1166 '72.0.3598.1',
1167 '72.0.3598.0',
1168 '71.0.3578.32',
1169 '70.0.3538.87',
1170 '72.0.3597.1',
1171 '72.0.3597.0',
1172 '72.0.3596.2',
1173 '71.0.3578.31',
1174 '70.0.3538.86',
1175 '71.0.3578.30',
1176 '71.0.3578.29',
1177 '72.0.3596.1',
1178 '72.0.3596.0',
1179 '71.0.3578.28',
1180 '70.0.3538.85',
1181 '72.0.3595.2',
1182 '72.0.3591.3',
1183 '72.0.3595.1',
1184 '72.0.3595.0',
1185 '71.0.3578.27',
1186 '70.0.3538.84',
1187 '72.0.3594.1',
1188 '72.0.3594.0',
1189 '71.0.3578.26',
1190 '70.0.3538.83',
1191 '72.0.3593.2',
1192 '72.0.3593.1',
1193 '72.0.3593.0',
1194 '71.0.3578.25',
1195 '70.0.3538.82',
1196 '72.0.3589.3',
1197 '72.0.3592.2',
1198 '72.0.3592.1',
1199 '72.0.3592.0',
1200 '71.0.3578.24',
1201 '72.0.3589.2',
1202 '70.0.3538.81',
1203 '70.0.3538.80',
1204 '72.0.3591.2',
1205 '72.0.3591.1',
1206 '72.0.3591.0',
1207 '71.0.3578.23',
1208 '70.0.3538.79',
1209 '71.0.3578.22',
1210 '72.0.3590.1',
1211 '72.0.3590.0',
1212 '71.0.3578.21',
1213 '70.0.3538.78',
1214 '70.0.3538.77',
1215 '72.0.3589.1',
1216 '72.0.3589.0',
1217 '71.0.3578.20',
1218 '70.0.3538.76',
1219 '71.0.3578.19',
1220 '70.0.3538.75',
1221 '72.0.3588.1',
1222 '72.0.3588.0',
1223 '71.0.3578.18',
1224 '70.0.3538.74',
1225 '72.0.3586.2',
1226 '72.0.3587.0',
1227 '71.0.3578.17',
1228 '70.0.3538.73',
1229 '72.0.3586.1',
1230 '72.0.3586.0',
1231 '71.0.3578.16',
1232 '70.0.3538.72',
1233 '72.0.3585.1',
1234 '72.0.3585.0',
1235 '71.0.3578.15',
1236 '70.0.3538.71',
1237 '71.0.3578.14',
1238 '72.0.3584.1',
1239 '72.0.3584.0',
1240 '71.0.3578.13',
1241 '70.0.3538.70',
1242 '72.0.3583.2',
1243 '71.0.3578.12',
1244 '72.0.3583.1',
1245 '72.0.3583.0',
1246 '71.0.3578.11',
1247 '70.0.3538.69',
1248 '71.0.3578.10',
1249 '72.0.3582.0',
1250 '72.0.3581.4',
1251 '71.0.3578.9',
1252 '70.0.3538.67',
1253 '72.0.3581.3',
1254 '72.0.3581.2',
1255 '72.0.3581.1',
1256 '72.0.3581.0',
1257 '71.0.3578.8',
1258 '70.0.3538.66',
1259 '72.0.3580.1',
1260 '72.0.3580.0',
1261 '71.0.3578.7',
1262 '70.0.3538.65',
1263 '71.0.3578.6',
1264 '72.0.3579.1',
1265 '72.0.3579.0',
1266 '71.0.3578.5',
1267 '70.0.3538.64',
1268 '71.0.3578.4',
1269 '71.0.3578.3',
1270 '71.0.3578.2',
1271 '71.0.3578.1',
1272 '71.0.3578.0',
1273 '70.0.3538.63',
1274 '69.0.3497.128',
1275 '70.0.3538.62',
1276 '70.0.3538.61',
1277 '70.0.3538.60',
1278 '70.0.3538.59',
1279 '71.0.3577.1',
1280 '71.0.3577.0',
1281 '70.0.3538.58',
1282 '69.0.3497.127',
1283 '71.0.3576.2',
1284 '71.0.3576.1',
1285 '71.0.3576.0',
1286 '70.0.3538.57',
1287 '70.0.3538.56',
1288 '71.0.3575.2',
1289 '70.0.3538.55',
1290 '69.0.3497.126',
1291 '70.0.3538.54',
1292 '71.0.3575.1',
1293 '71.0.3575.0',
1294 '71.0.3574.1',
1295 '71.0.3574.0',
1296 '70.0.3538.53',
1297 '69.0.3497.125',
1298 '70.0.3538.52',
1299 '71.0.3573.1',
1300 '71.0.3573.0',
1301 '70.0.3538.51',
1302 '69.0.3497.124',
1303 '71.0.3572.1',
1304 '71.0.3572.0',
1305 '70.0.3538.50',
1306 '69.0.3497.123',
1307 '71.0.3571.2',
1308 '70.0.3538.49',
1309 '69.0.3497.122',
1310 '71.0.3571.1',
1311 '71.0.3571.0',
1312 '70.0.3538.48',
1313 '69.0.3497.121',
1314 '71.0.3570.1',
1315 '71.0.3570.0',
1316 '70.0.3538.47',
1317 '69.0.3497.120',
1318 '71.0.3568.2',
1319 '71.0.3569.1',
1320 '71.0.3569.0',
1321 '70.0.3538.46',
1322 '69.0.3497.119',
1323 '70.0.3538.45',
1324 '71.0.3568.1',
1325 '71.0.3568.0',
1326 '70.0.3538.44',
1327 '69.0.3497.118',
1328 '70.0.3538.43',
1329 '70.0.3538.42',
1330 '71.0.3567.1',
1331 '71.0.3567.0',
1332 '70.0.3538.41',
1333 '69.0.3497.117',
1334 '71.0.3566.1',
1335 '71.0.3566.0',
1336 '70.0.3538.40',
1337 '69.0.3497.116',
1338 '71.0.3565.1',
1339 '71.0.3565.0',
1340 '70.0.3538.39',
1341 '69.0.3497.115',
1342 '71.0.3564.1',
1343 '71.0.3564.0',
1344 '70.0.3538.38',
1345 '69.0.3497.114',
1346 '71.0.3563.0',
1347 '71.0.3562.2',
1348 '70.0.3538.37',
1349 '69.0.3497.113',
1350 '70.0.3538.36',
1351 '70.0.3538.35',
1352 '71.0.3562.1',
1353 '71.0.3562.0',
1354 '70.0.3538.34',
1355 '69.0.3497.112',
1356 '70.0.3538.33',
1357 '71.0.3561.1',
1358 '71.0.3561.0',
1359 '70.0.3538.32',
1360 '69.0.3497.111',
1361 '71.0.3559.6',
1362 '71.0.3560.1',
1363 '71.0.3560.0',
1364 '71.0.3559.5',
1365 '71.0.3559.4',
1366 '70.0.3538.31',
1367 '69.0.3497.110',
1368 '71.0.3559.3',
1369 '70.0.3538.30',
1370 '69.0.3497.109',
1371 '71.0.3559.2',
1372 '71.0.3559.1',
1373 '71.0.3559.0',
1374 '70.0.3538.29',
1375 '69.0.3497.108',
1376 '71.0.3558.2',
1377 '71.0.3558.1',
1378 '71.0.3558.0',
1379 '70.0.3538.28',
1380 '69.0.3497.107',
1381 '71.0.3557.2',
1382 '71.0.3557.1',
1383 '71.0.3557.0',
1384 '70.0.3538.27',
1385 '69.0.3497.106',
1386 '71.0.3554.4',
1387 '70.0.3538.26',
1388 '71.0.3556.1',
1389 '71.0.3556.0',
1390 '70.0.3538.25',
1391 '71.0.3554.3',
1392 '69.0.3497.105',
1393 '71.0.3554.2',
1394 '70.0.3538.24',
1395 '69.0.3497.104',
1396 '71.0.3555.2',
1397 '70.0.3538.23',
1398 '71.0.3555.1',
1399 '71.0.3555.0',
1400 '70.0.3538.22',
1401 '69.0.3497.103',
1402 '71.0.3554.1',
1403 '71.0.3554.0',
1404 '70.0.3538.21',
1405 '69.0.3497.102',
1406 '71.0.3553.3',
1407 '70.0.3538.20',
1408 '69.0.3497.101',
1409 '71.0.3553.2',
1410 '69.0.3497.100',
1411 '71.0.3553.1',
1412 '71.0.3553.0',
1413 '70.0.3538.19',
1414 '69.0.3497.99',
1415 '69.0.3497.98',
1416 '69.0.3497.97',
1417 '71.0.3552.6',
1418 '71.0.3552.5',
1419 '71.0.3552.4',
1420 '71.0.3552.3',
1421 '71.0.3552.2',
1422 '71.0.3552.1',
1423 '71.0.3552.0',
1424 '70.0.3538.18',
1425 '69.0.3497.96',
1426 '71.0.3551.3',
1427 '71.0.3551.2',
1428 '71.0.3551.1',
1429 '71.0.3551.0',
1430 '70.0.3538.17',
1431 '69.0.3497.95',
1432 '71.0.3550.3',
1433 '71.0.3550.2',
1434 '71.0.3550.1',
1435 '71.0.3550.0',
1436 '70.0.3538.16',
1437 '69.0.3497.94',
1438 '71.0.3549.1',
1439 '71.0.3549.0',
1440 '70.0.3538.15',
1441 '69.0.3497.93',
1442 '69.0.3497.92',
1443 '71.0.3548.1',
1444 '71.0.3548.0',
1445 '70.0.3538.14',
1446 '69.0.3497.91',
1447 '71.0.3547.1',
1448 '71.0.3547.0',
1449 '70.0.3538.13',
1450 '69.0.3497.90',
1451 '71.0.3546.2',
1452 '69.0.3497.89',
1453 '71.0.3546.1',
1454 '71.0.3546.0',
1455 '70.0.3538.12',
1456 '69.0.3497.88',
1457 '71.0.3545.4',
1458 '71.0.3545.3',
1459 '71.0.3545.2',
1460 '71.0.3545.1',
1461 '71.0.3545.0',
1462 '70.0.3538.11',
1463 '69.0.3497.87',
1464 '71.0.3544.5',
1465 '71.0.3544.4',
1466 '71.0.3544.3',
1467 '71.0.3544.2',
1468 '71.0.3544.1',
1469 '71.0.3544.0',
1470 '69.0.3497.86',
1471 '70.0.3538.10',
1472 '69.0.3497.85',
1473 '70.0.3538.9',
1474 '69.0.3497.84',
1475 '71.0.3543.4',
1476 '70.0.3538.8',
1477 '71.0.3543.3',
1478 '71.0.3543.2',
1479 '71.0.3543.1',
1480 '71.0.3543.0',
1481 '70.0.3538.7',
1482 '69.0.3497.83',
1483 '71.0.3542.2',
1484 '71.0.3542.1',
1485 '71.0.3542.0',
1486 '70.0.3538.6',
1487 '69.0.3497.82',
1488 '69.0.3497.81',
1489 '71.0.3541.1',
1490 '71.0.3541.0',
1491 '70.0.3538.5',
1492 '69.0.3497.80',
1493 '71.0.3540.1',
1494 '71.0.3540.0',
1495 '70.0.3538.4',
1496 '69.0.3497.79',
1497 '70.0.3538.3',
1498 '71.0.3539.1',
1499 '71.0.3539.0',
1500 '69.0.3497.78',
1501 '68.0.3440.134',
1502 '69.0.3497.77',
1503 '70.0.3538.2',
1504 '70.0.3538.1',
1505 '70.0.3538.0',
1506 '69.0.3497.76',
1507 '68.0.3440.133',
1508 '69.0.3497.75',
1509 '70.0.3537.2',
1510 '70.0.3537.1',
1511 '70.0.3537.0',
1512 '69.0.3497.74',
1513 '68.0.3440.132',
1514 '70.0.3536.0',
1515 '70.0.3535.5',
1516 '70.0.3535.4',
1517 '70.0.3535.3',
1518 '69.0.3497.73',
1519 '68.0.3440.131',
1520 '70.0.3532.8',
1521 '70.0.3532.7',
1522 '69.0.3497.72',
1523 '69.0.3497.71',
1524 '70.0.3535.2',
1525 '70.0.3535.1',
1526 '70.0.3535.0',
1527 '69.0.3497.70',
1528 '68.0.3440.130',
1529 '69.0.3497.69',
1530 '68.0.3440.129',
1531 '70.0.3534.4',
1532 '70.0.3534.3',
1533 '70.0.3534.2',
1534 '70.0.3534.1',
1535 '70.0.3534.0',
1536 '69.0.3497.68',
1537 '68.0.3440.128',
1538 '70.0.3533.2',
1539 '70.0.3533.1',
1540 '70.0.3533.0',
1541 '69.0.3497.67',
1542 '68.0.3440.127',
1543 '70.0.3532.6',
1544 '70.0.3532.5',
1545 '70.0.3532.4',
1546 '69.0.3497.66',
1547 '68.0.3440.126',
1548 '70.0.3532.3',
1549 '70.0.3532.2',
1550 '70.0.3532.1',
1551 '69.0.3497.60',
1552 '69.0.3497.65',
1553 '69.0.3497.64',
1554 '70.0.3532.0',
1555 '70.0.3531.0',
1556 '70.0.3530.4',
1557 '70.0.3530.3',
1558 '70.0.3530.2',
1559 '69.0.3497.58',
1560 '68.0.3440.125',
1561 '69.0.3497.57',
1562 '69.0.3497.56',
1563 '69.0.3497.55',
1564 '69.0.3497.54',
1565 '70.0.3530.1',
1566 '70.0.3530.0',
1567 '69.0.3497.53',
1568 '68.0.3440.124',
1569 '69.0.3497.52',
1570 '70.0.3529.3',
1571 '70.0.3529.2',
1572 '70.0.3529.1',
1573 '70.0.3529.0',
1574 '69.0.3497.51',
1575 '70.0.3528.4',
1576 '68.0.3440.123',
1577 '70.0.3528.3',
1578 '70.0.3528.2',
1579 '70.0.3528.1',
1580 '70.0.3528.0',
1581 '69.0.3497.50',
1582 '68.0.3440.122',
1583 '70.0.3527.1',
1584 '70.0.3527.0',
1585 '69.0.3497.49',
1586 '68.0.3440.121',
1587 '70.0.3526.1',
1588 '70.0.3526.0',
1589 '68.0.3440.120',
1590 '69.0.3497.48',
1591 '69.0.3497.47',
1592 '68.0.3440.119',
1593 '68.0.3440.118',
1594 '70.0.3525.5',
1595 '70.0.3525.4',
1596 '70.0.3525.3',
1597 '68.0.3440.117',
1598 '69.0.3497.46',
1599 '70.0.3525.2',
1600 '70.0.3525.1',
1601 '70.0.3525.0',
1602 '69.0.3497.45',
1603 '68.0.3440.116',
1604 '70.0.3524.4',
1605 '70.0.3524.3',
1606 '69.0.3497.44',
1607 '70.0.3524.2',
1608 '70.0.3524.1',
1609 '70.0.3524.0',
1610 '70.0.3523.2',
1611 '69.0.3497.43',
1612 '68.0.3440.115',
1613 '70.0.3505.9',
1614 '69.0.3497.42',
1615 '70.0.3505.8',
1616 '70.0.3523.1',
1617 '70.0.3523.0',
1618 '69.0.3497.41',
1619 '68.0.3440.114',
1620 '70.0.3505.7',
1621 '69.0.3497.40',
1622 '70.0.3522.1',
1623 '70.0.3522.0',
1624 '70.0.3521.2',
1625 '69.0.3497.39',
1626 '68.0.3440.113',
1627 '70.0.3505.6',
1628 '70.0.3521.1',
1629 '70.0.3521.0',
1630 '69.0.3497.38',
1631 '68.0.3440.112',
1632 '70.0.3520.1',
1633 '70.0.3520.0',
1634 '69.0.3497.37',
1635 '68.0.3440.111',
1636 '70.0.3519.3',
1637 '70.0.3519.2',
1638 '70.0.3519.1',
1639 '70.0.3519.0',
1640 '69.0.3497.36',
1641 '68.0.3440.110',
1642 '70.0.3518.1',
1643 '70.0.3518.0',
1644 '69.0.3497.35',
1645 '69.0.3497.34',
1646 '68.0.3440.109',
1647 '70.0.3517.1',
1648 '70.0.3517.0',
1649 '69.0.3497.33',
1650 '68.0.3440.108',
1651 '69.0.3497.32',
1652 '70.0.3516.3',
1653 '70.0.3516.2',
1654 '70.0.3516.1',
1655 '70.0.3516.0',
1656 '69.0.3497.31',
1657 '68.0.3440.107',
1658 '70.0.3515.4',
1659 '68.0.3440.106',
1660 '70.0.3515.3',
1661 '70.0.3515.2',
1662 '70.0.3515.1',
1663 '70.0.3515.0',
1664 '69.0.3497.30',
1665 '68.0.3440.105',
1666 '68.0.3440.104',
1667 '70.0.3514.2',
1668 '70.0.3514.1',
1669 '70.0.3514.0',
1670 '69.0.3497.29',
1671 '68.0.3440.103',
1672 '70.0.3513.1',
1673 '70.0.3513.0',
1674 '69.0.3497.28',
    )
    return _USER_AGENT_TPL % random.choice(_CHROME_VERSIONS)


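# Illustrative sketch (not part of the original module): random_user_agent()
# simply picks one Chrome version for the fixed Windows UA template, e.g. when
# building per-request header overrides. The helper name is hypothetical.
def _example_random_user_agent():
    ua = random_user_agent()
    assert ua.startswith('Mozilla/5.0 (Windows NT 10.0; Win64; x64)')
    return {'User-Agent': ua}

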
std_headers = {
    'User-Agent': random_user_agent(),
    'Accept-Charset': 'ISO-8859-1,utf-8;q=0.7,*;q=0.7',
    'Accept': 'text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8',
    'Accept-Encoding': 'gzip, deflate',
    'Accept-Language': 'en-us,en;q=0.5',
}


USER_AGENTS = {
    'Safari': 'Mozilla/5.0 (X11; Linux x86_64; rv:10.0) AppleWebKit/533.20.25 (KHTML, like Gecko) Version/5.0.4 Safari/533.20.27',
}


NO_DEFAULT = object()

ENGLISH_MONTH_NAMES = [
    'January', 'February', 'March', 'April', 'May', 'June',
    'July', 'August', 'September', 'October', 'November', 'December']

MONTH_NAMES = {
    'en': ENGLISH_MONTH_NAMES,
    'fr': [
        'janvier', 'février', 'mars', 'avril', 'mai', 'juin',
        'juillet', 'août', 'septembre', 'octobre', 'novembre', 'décembre'],
}

KNOWN_EXTENSIONS = (
    'mp4', 'm4a', 'm4p', 'm4b', 'm4r', 'm4v', 'aac',
    'flv', 'f4v', 'f4a', 'f4b',
    'webm', 'ogg', 'ogv', 'oga', 'ogx', 'spx', 'opus',
    'mkv', 'mka', 'mk3d',
    'avi', 'divx',
    'mov',
    'asf', 'wmv', 'wma',
    '3gp', '3g2',
    'mp3',
    'flac',
    'ape',
    'wav',
    'f4f', 'f4m', 'm3u8', 'smil')

# needed for sanitizing filenames in restricted mode
ACCENT_CHARS = dict(zip('ÂÃÄÀÁÅÆÇÈÉÊËÌÍÎÏÐÑÒÓÔÕÖŐØŒÙÚÛÜŰÝÞßàáâãäåæçèéêëìíîïðñòóôõöőøœùúûüűýþÿ',
                        itertools.chain('AAAAAA', ['AE'], 'CEEEEIIIIDNOOOOOOO', ['OE'], 'UUUUUY', ['TH', 'ss'],
                                        'aaaaaa', ['ae'], 'ceeeeiiiionooooooo', ['oe'], 'uuuuuy', ['th'], 'y')))

DATE_FORMATS = (
    '%d %B %Y',
    '%d %b %Y',
    '%B %d %Y',
    '%B %dst %Y',
    '%B %dnd %Y',
    '%B %drd %Y',
    '%B %dth %Y',
    '%b %d %Y',
    '%b %dst %Y',
    '%b %dnd %Y',
    '%b %drd %Y',
    '%b %dth %Y',
    '%b %dst %Y %I:%M',
    '%b %dnd %Y %I:%M',
    '%b %drd %Y %I:%M',
    '%b %dth %Y %I:%M',
    '%Y %m %d',
    '%Y-%m-%d',
    '%Y.%m.%d.',
    '%Y/%m/%d',
    '%Y/%m/%d %H:%M',
    '%Y/%m/%d %H:%M:%S',
    '%Y%m%d%H%M',
    '%Y%m%d%H%M%S',
    '%Y-%m-%d %H:%M',
    '%Y-%m-%d %H:%M:%S',
    '%Y-%m-%d %H:%M:%S.%f',
    '%Y-%m-%d %H:%M:%S:%f',
    '%d.%m.%Y %H:%M',
    '%d.%m.%Y %H.%M',
    '%Y-%m-%dT%H:%M:%SZ',
    '%Y-%m-%dT%H:%M:%S.%fZ',
    '%Y-%m-%dT%H:%M:%S.%f0Z',
    '%Y-%m-%dT%H:%M:%S',
    '%Y-%m-%dT%H:%M:%S.%f',
    '%Y-%m-%dT%H:%M',
    '%b %d %Y at %H:%M',
    '%b %d %Y at %H:%M:%S',
    '%B %d %Y at %H:%M',
    '%B %d %Y at %H:%M:%S',
    '%H:%M %d-%b-%Y',
)

DATE_FORMATS_DAY_FIRST = list(DATE_FORMATS)
DATE_FORMATS_DAY_FIRST.extend([
    '%d-%m-%Y',
    '%d.%m.%Y',
    '%d.%m.%y',
    '%d/%m/%Y',
    '%d/%m/%y',
    '%d/%m/%Y %H:%M:%S',
])

DATE_FORMATS_MONTH_FIRST = list(DATE_FORMATS)
DATE_FORMATS_MONTH_FIRST.extend([
    '%m-%d-%Y',
    '%m.%d.%Y',
    '%m/%d/%Y',
    '%m/%d/%y',
    '%m/%d/%Y %H:%M:%S',
])

PACKED_CODES_RE = r"}\('(.+)',(\d+),(\d+),'([^']+)'\.split\('\|'\)"
JSON_LD_RE = r'(?is)<script[^>]+type=(["\']?)application/ld\+json\1[^>]*>(?P<json_ld>.+?)</script>'


def preferredencoding():
    """Get preferred encoding.

    Returns the best encoding scheme for the system, based on
    locale.getpreferredencoding() and some further tweaks.
    """
    try:
        pref = locale.getpreferredencoding()
        'TEST'.encode(pref)
    except Exception:
        pref = 'UTF-8'

    return pref


def write_json_file(obj, fn):
    """ Encode obj as JSON and write it to fn, atomically if possible """

    fn = encodeFilename(fn)
    if sys.version_info < (3, 0) and sys.platform != 'win32':
        encoding = get_filesystem_encoding()
        # os.path.basename returns a bytes object, but NamedTemporaryFile
        # will fail if the filename contains non-ASCII characters unless we
        # use a unicode object
        path_basename = lambda f: os.path.basename(fn).decode(encoding)
        # the same for os.path.dirname
        path_dirname = lambda f: os.path.dirname(fn).decode(encoding)
    else:
        path_basename = os.path.basename
        path_dirname = os.path.dirname

    args = {
        'suffix': '.tmp',
        'prefix': path_basename(fn) + '.',
        'dir': path_dirname(fn),
        'delete': False,
    }

    # In Python 2.x, json.dump expects a bytestream.
    # In Python 3.x, it writes to a character stream
    if sys.version_info < (3, 0):
        args['mode'] = 'wb'
    else:
        args.update({
            'mode': 'w',
            'encoding': 'utf-8',
        })

    tf = tempfile.NamedTemporaryFile(**compat_kwargs(args))

    try:
        with tf:
            json.dump(obj, tf)
        if sys.platform == 'win32':
            # Need to remove existing file on Windows, else os.rename raises
            # WindowsError or FileExistsError.
            try:
                os.unlink(fn)
            except OSError:
                pass
        try:
            mask = os.umask(0)
            os.umask(mask)
            os.chmod(tf.name, 0o666 & ~mask)
        except OSError:
            pass
        os.rename(tf.name, fn)
    except Exception:
        try:
            os.remove(tf.name)
        except OSError:
            pass
        raise


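# Illustrative sketch (not part of the original module): write_json_file()
# takes any JSON-serializable object and a destination path; the temporary
# file plus os.rename above is what makes the write atomic where the OS
# allows it. The file name below is hypothetical.
def _example_write_json_file():
    write_json_file({'id': 'abc123', 'title': 'test'}, 'info.json')

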
if sys.version_info >= (2, 7):
    def find_xpath_attr(node, xpath, key, val=None):
        """ Find the xpath xpath[@key=val] """
        assert re.match(r'^[a-zA-Z_-]+$', key)
        expr = xpath + ('[@%s]' % key if val is None else "[@%s='%s']" % (key, val))
        return node.find(expr)
else:
    def find_xpath_attr(node, xpath, key, val=None):
        for f in node.findall(compat_xpath(xpath)):
            if key not in f.attrib:
                continue
            if val is None or f.attrib.get(key) == val:
                return f
        return None

# On Python 2.6 the xml.etree.ElementTree.Element methods don't support
# the namespace parameter


def xpath_with_ns(path, ns_map):
    components = [c.split(':') for c in path.split('/')]
    replaced = []
    for c in components:
        if len(c) == 1:
            replaced.append(c[0])
        else:
            ns, tag = c
            replaced.append('{%s}%s' % (ns_map[ns], tag))
    return '/'.join(replaced)


def xpath_element(node, xpath, name=None, fatal=False, default=NO_DEFAULT):
    def _find_xpath(xpath):
        return node.find(compat_xpath(xpath))

    if isinstance(xpath, (str, compat_str)):
        n = _find_xpath(xpath)
    else:
        for xp in xpath:
            n = _find_xpath(xp)
            if n is not None:
                break

    if n is None:
        if default is not NO_DEFAULT:
            return default
        elif fatal:
            name = xpath if name is None else name
            raise ExtractorError('Could not find XML element %s' % name)
        else:
            return None
    return n


def xpath_text(node, xpath, name=None, fatal=False, default=NO_DEFAULT):
    n = xpath_element(node, xpath, name, fatal=fatal, default=default)
    if n is None or n == default:
        return n
    if n.text is None:
        if default is not NO_DEFAULT:
            return default
        elif fatal:
            name = xpath if name is None else name
            raise ExtractorError('Could not find XML element\'s text %s' % name)
        else:
            return None
    return n.text


def xpath_attr(node, xpath, key, name=None, fatal=False, default=NO_DEFAULT):
    n = find_xpath_attr(node, xpath, key)
    if n is None:
        if default is not NO_DEFAULT:
            return default
        elif fatal:
            name = '%s[@%s]' % (xpath, key) if name is None else name
            raise ExtractorError('Could not find XML attribute %s' % name)
        else:
            return None
    return n.attrib[key]


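# Illustrative sketch (not part of the original module): the xpath_* helpers
# wrap ElementTree lookups with NO_DEFAULT/fatal handling. Assuming a small
# XML document:
def _example_xpath_helpers():
    doc = compat_etree_fromstring('<root><title lang="en">Hi</title></root>')
    title = xpath_text(doc, './title', default=None)          # 'Hi'
    lang = xpath_attr(doc, './title', 'lang', default=None)   # 'en'
    return title, lang

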
def get_element_by_id(id, html):
    """Return the content of the tag with the specified ID in the passed HTML document"""
    return get_element_by_attribute('id', id, html)


def get_element_by_class(class_name, html):
    """Return the content of the first tag with the specified class in the passed HTML document"""
    retval = get_elements_by_class(class_name, html)
    return retval[0] if retval else None


def get_element_by_attribute(attribute, value, html, escape_value=True):
    retval = get_elements_by_attribute(attribute, value, html, escape_value)
    return retval[0] if retval else None


def get_elements_by_class(class_name, html):
    """Return the content of all tags with the specified class in the passed HTML document as a list"""
    return get_elements_by_attribute(
        'class', r'[^\'"]*\b%s\b[^\'"]*' % re.escape(class_name),
        html, escape_value=False)


def get_elements_by_attribute(attribute, value, html, escape_value=True):
    """Return the content of all tags with the specified attribute in the passed HTML document as a list"""

    value = re.escape(value) if escape_value else value

    retlist = []
    for m in re.finditer(r'''(?xs)
        <([a-zA-Z0-9:._-]+)
         (?:\s+[a-zA-Z0-9:._-]+(?:=[a-zA-Z0-9:._-]*|="[^"]*"|='[^']*'|))*?
         \s+%s=['"]?%s['"]?
         (?:\s+[a-zA-Z0-9:._-]+(?:=[a-zA-Z0-9:._-]*|="[^"]*"|='[^']*'|))*?
        \s*>
        (?P<content>.*?)
        </\1>
    ''' % (re.escape(attribute), value), html):
        res = m.group('content')

        if res.startswith('"') or res.startswith("'"):
            res = res[1:-1]

        retlist.append(unescapeHTML(res))

    return retlist


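# Illustrative sketch (not part of the original module): the attribute/class
# helpers operate on raw HTML text, no DOM required. The expected result is
# indicative only.
def _example_get_element_by_class():
    html = '<div class="title main">Some title</div>'
    return get_element_by_class('title', html)  # should yield 'Some title'

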
class HTMLAttributeParser(compat_HTMLParser):
    """Trivial HTML parser to gather the attributes for a single element"""

    def __init__(self):
        self.attrs = {}
        compat_HTMLParser.__init__(self)

    def handle_starttag(self, tag, attrs):
        self.attrs = dict(attrs)


def extract_attributes(html_element):
    """Given a string for an HTML element such as
    <el
         a="foo" B="bar" c="&98;az" d=boz
         empty= noval entity="&amp;"
         sq='"' dq="'"
    >
    Decode and return a dictionary of attributes.
    {
        'a': 'foo', 'b': 'bar', c: 'baz', d: 'boz',
        'empty': '', 'noval': None, 'entity': '&',
        'sq': '"', 'dq': '\''
    }.
    NB HTMLParser is stricter in Python 2.6 & 3.2 than in later versions,
    but the cases in the unit test will work for all of 2.6, 2.7, 3.2-3.5.
    """
    parser = HTMLAttributeParser()
    try:
        parser.feed(html_element)
        parser.close()
    # Older Python may throw HTMLParseError in case of malformed HTML
    except compat_HTMLParseError:
        pass
    return parser.attrs


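# Illustrative sketch (not part of the original module): extract_attributes()
# takes the raw text of a single opening tag. The expected result shown in
# the comment is indicative only.
def _example_extract_attributes():
    return extract_attributes('<a href="/watch?v=abc" class="yt-link" data-id=42>')
    # -> {'href': '/watch?v=abc', 'class': 'yt-link', 'data-id': '42'}

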
def clean_html(html):
    """Clean an HTML snippet into a readable string"""

    if html is None:  # Convenience for sanitizing descriptions etc.
        return html

    # Newline vs <br />
    html = html.replace('\n', ' ')
    html = re.sub(r'(?u)\s*<\s*br\s*/?\s*>\s*', '\n', html)
    html = re.sub(r'(?u)<\s*/\s*p\s*>\s*<\s*p[^>]*>', '\n', html)
    # Strip html tags
    html = re.sub('<.*?>', '', html)
    # Replace html entities
    html = unescapeHTML(html)
    return html.strip()


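# Illustrative sketch (not part of the original module): clean_html() turns
# markup-ish descriptions into plain text, mapping <br>/<p> breaks to newlines.
def _example_clean_html():
    return clean_html('<p>First line<br/>second &amp; last</p>')
    # -> 'First line\nsecond & last'

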
def sanitize_open(filename, open_mode):
    """Try to open the given filename, and slightly tweak it if this fails.

    Attempts to open the given filename. If this fails, it tries to change
    the filename slightly, step by step, until it's either able to open it
    or it fails and raises a final exception, like the standard open()
    function.

    It returns the tuple (stream, definitive_file_name).
    """
    try:
        if filename == '-':
            if sys.platform == 'win32':
                import msvcrt
                msvcrt.setmode(sys.stdout.fileno(), os.O_BINARY)
            return (sys.stdout.buffer if hasattr(sys.stdout, 'buffer') else sys.stdout, filename)
        stream = open(encodeFilename(filename), open_mode)
        return (stream, filename)
    except (IOError, OSError) as err:
        if err.errno in (errno.EACCES,):
            raise

        # In case of error, try to remove win32 forbidden chars
        alt_filename = sanitize_path(filename)
        if alt_filename == filename:
            raise
        else:
            # An exception here should be caught in the caller
            stream = open(encodeFilename(alt_filename), open_mode)
            return (stream, alt_filename)


def timeconvert(timestr):
    """Convert RFC 2822 defined time string into system timestamp"""
    timestamp = None
    timetuple = email.utils.parsedate_tz(timestr)
    if timetuple is not None:
        timestamp = email.utils.mktime_tz(timetuple)
    return timestamp


def sanitize_filename(s, restricted=False, is_id=False):
    """Sanitizes a string so it can be used as part of a filename.
    If restricted is set, use a stricter subset of allowed characters.
    Set is_id if this is not an arbitrary string, but an ID that should be kept
    if possible.
    """
    def replace_insane(char):
        if restricted and char in ACCENT_CHARS:
            return ACCENT_CHARS[char]
        elif not restricted and char == '\n':
            return ' '
        elif char == '?' or ord(char) < 32 or ord(char) == 127:
            return ''
        elif char == '"':
            return '' if restricted else '\''
        elif char == ':':
            return '_-' if restricted else ' -'
        elif char in '\\/|*<>':
            return '_'
        if restricted and (char in '!&\'()[]{}$;`^,#' or char.isspace()):
            return '_'
        if restricted and ord(char) > 127:
            return '_'
        return char

    if s == '':
        return ''
    # Handle timestamps
    s = re.sub(r'[0-9]+(?::[0-9]+)+', lambda m: m.group(0).replace(':', '_'), s)
    result = ''.join(map(replace_insane, s))
    if not is_id:
        while '__' in result:
            result = result.replace('__', '_')
        result = result.strip('_')
        # Common case of "Foreign band name - English song title"
        if restricted and result.startswith('-_'):
            result = result[2:]
        if result.startswith('-'):
            result = '_' + result[len('-'):]
        result = result.lstrip('.')
        if not result:
            result = '_'
    return result


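# Illustrative sketch (not part of the original module): in restricted mode,
# path separators, colons and spaces all collapse to safe characters.
def _example_sanitize_filename():
    return sanitize_filename('Artist: Song / Title?', restricted=True)
    # roughly 'Artist_-_Song_Title' (separators and '?' are dropped/replaced)

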
def sanitize_path(s, force=False):
    """Sanitizes and normalizes path on Windows"""
    if sys.platform == 'win32':
        force = False
        drive_or_unc, _ = os.path.splitdrive(s)
        if sys.version_info < (2, 7) and not drive_or_unc:
            drive_or_unc, _ = os.path.splitunc(s)
    elif force:
        drive_or_unc = ''
    else:
        return s

    norm_path = os.path.normpath(remove_start(s, drive_or_unc)).split(os.path.sep)
    if drive_or_unc:
        norm_path.pop(0)
    sanitized_path = [
        path_part if path_part in ['.', '..'] else re.sub(r'(?:[/<>:"\|\\?\*]|[\s.]$)', '#', path_part)
        for path_part in norm_path]
    if drive_or_unc:
        sanitized_path.insert(0, drive_or_unc + os.path.sep)
    elif force and s[0] == os.path.sep:
        sanitized_path.insert(0, os.path.sep)
    return os.path.join(*sanitized_path)


def sanitize_url(url):
    # Prepend protocol-less URLs with `http:` scheme in order to mitigate
    # the number of unwanted failures due to missing protocol
    if url.startswith('//'):
        return 'http:%s' % url
    # Fix some common typos seen so far
    COMMON_TYPOS = (
        # https://github.com/ytdl-org/youtube-dl/issues/15649
        (r'^httpss://', r'https://'),
        # https://bx1.be/lives/direct-tv/
        (r'^rmtp([es]?)://', r'rtmp\1://'),
    )
    for mistake, fixup in COMMON_TYPOS:
        if re.match(mistake, url):
            return re.sub(mistake, fixup, url)
    return url


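# Illustrative sketch (not part of the original module): sanitize_url() fills
# in a missing scheme and repairs a couple of known scheme typos.
def _example_sanitize_url():
    assert sanitize_url('//example.com/video') == 'http://example.com/video'
    assert sanitize_url('rmtp://example.com/live') == 'rtmp://example.com/live'

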
def extract_basic_auth(url):
    parts = compat_urlparse.urlsplit(url)
    if parts.username is None:
        return url, None
    url = compat_urlparse.urlunsplit(parts._replace(netloc=(
        parts.hostname if parts.port is None
        else '%s:%d' % (parts.hostname, parts.port))))
    auth_payload = base64.b64encode(
        ('%s:%s' % (parts.username, parts.password or '')).encode('utf-8'))
    return url, 'Basic ' + auth_payload.decode('utf-8')


def sanitized_Request(url, *args, **kwargs):
    url, auth_header = extract_basic_auth(escape_url(sanitize_url(url)))
    if auth_header is not None:
        headers = args[1] if len(args) >= 2 else kwargs.setdefault('headers', {})
        headers['Authorization'] = auth_header
    return compat_urllib_request.Request(url, *args, **kwargs)


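# Illustrative sketch (not part of the original module): credentials embedded
# in the URL are stripped out and converted into a Basic auth header value.
def _example_extract_basic_auth():
    return extract_basic_auth('https://user:pass@example.com/feed')
    # -> ('https://example.com/feed', 'Basic dXNlcjpwYXNz')

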
def expand_path(s):
    """Expand shell variables and ~"""
    return os.path.expandvars(compat_expanduser(s))


def orderedSet(iterable):
    """ Remove all duplicates from the input iterable """
    res = []
    for el in iterable:
        if el not in res:
            res.append(el)
    return res


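# Illustrative sketch (not part of the original module): unlike set(),
# orderedSet() keeps the first-seen order of the elements.
def _example_orderedSet():
    assert orderedSet(['hd', 'sd', 'hd', 'audio']) == ['hd', 'sd', 'audio']

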
def _htmlentity_transform(entity_with_semicolon):
    """Transforms an HTML entity to a character."""
    entity = entity_with_semicolon[:-1]

    # Known non-numeric HTML entity
    if entity in compat_html_entities.name2codepoint:
        return compat_chr(compat_html_entities.name2codepoint[entity])

    # TODO: HTML5 allows entities without a semicolon. For example,
    # '&Eacuteric' should be decoded as 'Éric'.
    if entity_with_semicolon in compat_html_entities_html5:
        return compat_html_entities_html5[entity_with_semicolon]

    mobj = re.match(r'#(x[0-9a-fA-F]+|[0-9]+)', entity)
    if mobj is not None:
        numstr = mobj.group(1)
        if numstr.startswith('x'):
            base = 16
            numstr = '0%s' % numstr
        else:
            base = 10
        # See https://github.com/ytdl-org/youtube-dl/issues/7518
        try:
            return compat_chr(int(numstr, base))
        except ValueError:
            pass

    # Unknown entity in name, return its literal representation
    return '&%s;' % entity


def unescapeHTML(s):
    if s is None:
        return None
    assert type(s) == compat_str

    return re.sub(
        r'&([^&;]+;)', lambda m: _htmlentity_transform(m.group(1)), s)


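# Illustrative sketch (not part of the original module): both named and
# numeric character references are resolved; unknown entities are kept as-is.
def _example_unescapeHTML():
    assert unescapeHTML('Tom &amp; Jerry &#39;18') == "Tom & Jerry '18"

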
def escapeHTML(text):
    return (
        text
        .replace('&', '&amp;')
        .replace('<', '&lt;')
        .replace('>', '&gt;')
        .replace('"', '&quot;')
        .replace("'", '&#39;')
    )


def process_communicate_or_kill(p, *args, **kwargs):
    try:
        return p.communicate(*args, **kwargs)
    except BaseException:  # Including KeyboardInterrupt
        p.kill()
        p.wait()
        raise


def get_subprocess_encoding():
    if sys.platform == 'win32' and sys.getwindowsversion()[0] >= 5:
        # For subprocess calls, encode with locale encoding
        # Refer to http://stackoverflow.com/a/9951851/35070
        encoding = preferredencoding()
    else:
        encoding = sys.getfilesystemencoding()
        if encoding is None:
            encoding = 'utf-8'
    return encoding


def encodeFilename(s, for_subprocess=False):
    """
    @param s The name of the file
    """

    assert type(s) == compat_str

    # Python 3 has a Unicode API
    if sys.version_info >= (3, 0):
        return s

    # Pass '' directly to use Unicode APIs on Windows 2000 and up
    # (Detecting Windows NT 4 is tricky because 'major >= 4' would
    # match Windows 9x series as well. Besides, NT 4 is obsolete.)
    if not for_subprocess and sys.platform == 'win32' and sys.getwindowsversion()[0] >= 5:
        return s

    # Jython assumes filenames are Unicode strings though reported as Python 2.x compatible
    if sys.platform.startswith('java'):
        return s

    return s.encode(get_subprocess_encoding(), 'ignore')


def decodeFilename(b, for_subprocess=False):

    if sys.version_info >= (3, 0):
        return b

    if not isinstance(b, bytes):
        return b

    return b.decode(get_subprocess_encoding(), 'ignore')

def encodeArgument(s):
    if not isinstance(s, compat_str):
        # Legacy code that uses byte strings
        # Uncomment the following line after fixing all post processors
        # assert False, 'Internal error: %r should be of type %r, is %r' % (s, compat_str, type(s))
        s = s.decode('ascii')
    return encodeFilename(s, True)


def decodeArgument(b):
    return decodeFilename(b, True)


def decodeOption(optval):
    if optval is None:
        return optval
    if isinstance(optval, bytes):
        optval = optval.decode(preferredencoding())

    assert isinstance(optval, compat_str)
    return optval


aa7785f8 2345_timetuple = collections.namedtuple('Time', ('hours', 'minutes', 'seconds', 'milliseconds'))
2346
2347
2348def timetuple_from_msec(msec):
2349 secs, msec = divmod(msec, 1000)
2350 mins, secs = divmod(secs, 60)
2351 hrs, mins = divmod(mins, 60)
2352 return _timetuple(hrs, mins, secs, msec)
2353
2354
cdb19aa4 2355def formatSeconds(secs, delim=':', msec=False):
aa7785f8 2356 time = timetuple_from_msec(secs * 1000)
2357 if time.hours:
2358 ret = '%d%s%02d%s%02d' % (time.hours, delim, time.minutes, delim, time.seconds)
2359 elif time.minutes:
2360 ret = '%d%s%02d' % (time.minutes, delim, time.seconds)
4539dd30 2361 else:
aa7785f8 2362 ret = '%d' % time.seconds
2363 return '%s.%03d' % (ret, time.milliseconds) if msec else ret
4539dd30 2364
a0ddb8a2 2365
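# Illustrative usage sketch (editor's addition, not part of the original module):
# 123456789 ms is 34h 17m 36s 789ms, and 3723.5 s renders as '1:02:03.500'.
def _example_format_seconds():  # pragma: no cover - illustrative only
    assert timetuple_from_msec(123456789) == _timetuple(34, 17, 36, 789)
    assert formatSeconds(3723.5, msec=True) == '1:02:03.500'
    assert formatSeconds(75) == '1:15'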
77562778 2366def _ssl_load_windows_store_certs(ssl_context, storename):
2367 # Code adapted from _load_windows_store_certs in https://github.com/python/cpython/blob/main/Lib/ssl.py
2368 try:
2369 certs = [cert for cert, encoding, trust in ssl.enum_certificates(storename)
2370 if encoding == 'x509_asn' and (
2371 trust is True or ssl.Purpose.SERVER_AUTH.oid in trust)]
2372 except PermissionError:
2373 return
2374 for cert in certs:
a2366922 2375 try:
77562778 2376 ssl_context.load_verify_locations(cadata=cert)
2377 except ssl.SSLError:
a2366922
PH
2378 pass
2379
77562778 2380
2381def make_HTTPS_handler(params, **kwargs):
2382 opts_check_certificate = not params.get('nocheckcertificate')
2383 context = ssl.SSLContext(ssl.PROTOCOL_TLS_CLIENT)
2384 context.check_hostname = opts_check_certificate
2385 context.verify_mode = ssl.CERT_REQUIRED if opts_check_certificate else ssl.CERT_NONE
2386 if opts_check_certificate:
4e3d1898 2387 try:
2388 context.load_default_certs()
2389 # Work around the issue in load_default_certs when there are bad certificates. See:
2390 # https://github.com/yt-dlp/yt-dlp/issues/1060,
2391 # https://bugs.python.org/issue35665, https://bugs.python.org/issue45312
2392 except ssl.SSLError:
2393 # enum_certificates is not present in mingw python. See https://github.com/yt-dlp/yt-dlp/issues/1151
2394 if sys.platform == 'win32' and hasattr(ssl, 'enum_certificates'):
2395 # Create a new context to discard any certificates that were already loaded
2396 context = ssl.SSLContext(ssl.PROTOCOL_TLS_CLIENT)
2397 context.check_hostname, context.verify_mode = True, ssl.CERT_REQUIRED
2398 for storename in ('CA', 'ROOT'):
2399 _ssl_load_windows_store_certs(context, storename)
2400 context.set_default_verify_paths()
77562778 2401 return YoutubeDLHTTPSHandler(params, context=context, **kwargs)
ea6d901e 2402
732ea2f0 2403
5873d4cc 2404def bug_reports_message(before=';'):
08f2a92c 2405 if ytdl_is_updateable():
7a5c1cfe 2406 update_cmd = 'type yt-dlp -U to update'
08f2a92c 2407 else:
7a5c1cfe 2408 update_cmd = 'see https://github.com/yt-dlp/yt-dlp on how to update'
5873d4cc 2409 msg = 'please report this issue on https://github.com/yt-dlp/yt-dlp .'
08f2a92c 2410 msg += ' Make sure you are using the latest version; %s.' % update_cmd
7a5c1cfe 2411 msg += ' Be sure to call yt-dlp with the --verbose flag and include its complete output.'
5873d4cc
F
2412
2413 before = before.rstrip()
2414 if not before or before.endswith(('.', '!', '?')):
2415 msg = msg[0].title() + msg[1:]
2416
2417 return (before + ' ' if before else '') + msg
08f2a92c
JMF
2418
2419
bf5b9d85
PM
2420class YoutubeDLError(Exception):
2421 """Base exception for YoutubeDL errors."""
2422 pass
2423
2424
3158150c 2425network_exceptions = [compat_urllib_error.URLError, compat_http_client.HTTPException, socket.error]
2426if hasattr(ssl, 'CertificateError'):
2427 network_exceptions.append(ssl.CertificateError)
2428network_exceptions = tuple(network_exceptions)
2429
2430
bf5b9d85 2431class ExtractorError(YoutubeDLError):
1c256f70 2432 """Error during info extraction."""
5f6a1245 2433
1151c407 2434 def __init__(self, msg, tb=None, expected=False, cause=None, video_id=None, ie=None):
9a82b238 2435 """ tb, if given, is the original traceback (so that it can be printed out).
7a5c1cfe 2436 If expected is set, this is a normal error message and most likely not a bug in yt-dlp.
9a82b238 2437 """
3158150c 2438 if sys.exc_info()[0] in network_exceptions:
9a82b238 2439 expected = True
d5979c5d 2440
526d74ec 2441 self.msg = str(msg)
1c256f70 2442 self.traceback = tb
1151c407 2443 self.expected = expected
2eabb802 2444 self.cause = cause
d11271dd 2445 self.video_id = video_id
1151c407 2446 self.ie = ie
2447 self.exc_info = sys.exc_info() # preserve original exception
2448
2449 super(ExtractorError, self).__init__(''.join((
2450 format_field(ie, template='[%s] '),
2451 format_field(video_id, template='%s: '),
526d74ec 2452 self.msg,
1151c407 2453 format_field(cause, template=' (caused by %r)'),
2454 '' if expected else bug_reports_message())))
1c256f70 2455
01951dda
PH
2456 def format_traceback(self):
2457 if self.traceback is None:
2458 return None
28e614de 2459 return ''.join(traceback.format_tb(self.traceback))
01951dda 2460
1c256f70 2461
416c7fcb
PH
2462class UnsupportedError(ExtractorError):
2463 def __init__(self, url):
2464 super(UnsupportedError, self).__init__(
2465 'Unsupported URL: %s' % url, expected=True)
2466 self.url = url
2467
2468
55b3e45b
JMF
2469class RegexNotFoundError(ExtractorError):
2470 """Error when a regex didn't match"""
2471 pass
2472
2473
773f291d
S
2474class GeoRestrictedError(ExtractorError):
2475 """Geographic restriction Error exception.
2476
2477 This exception may be thrown when a video is not available from your
2478 geographic location due to geographic restrictions imposed by a website.
2479 """
b6e0c7d2 2480
773f291d
S
2481 def __init__(self, msg, countries=None):
2482 super(GeoRestrictedError, self).__init__(msg, expected=True)
2483 self.msg = msg
2484 self.countries = countries
2485
2486
bf5b9d85 2487class DownloadError(YoutubeDLError):
59ae15a5 2488 """Download Error exception.
d77c3dfd 2489
59ae15a5
PH
2490 This exception may be thrown by FileDownloader objects if they are not
2491 configured to continue on errors. They will contain the appropriate
2492 error message.
2493 """
5f6a1245 2494
8cc83b8d
FV
2495 def __init__(self, msg, exc_info=None):
2496 """ exc_info, if given, is the original exception that caused the trouble (as returned by sys.exc_info()). """
2497 super(DownloadError, self).__init__(msg)
2498 self.exc_info = exc_info
d77c3dfd
FV
2499
2500
498f5606 2501class EntryNotInPlaylist(YoutubeDLError):
2502 """Entry not in playlist exception.
2503
2504 This exception will be thrown by YoutubeDL when a requested entry
2505 is not found in the playlist info_dict
2506 """
2507 pass
2508
2509
bf5b9d85 2510class SameFileError(YoutubeDLError):
59ae15a5 2511 """Same File exception.
d77c3dfd 2512
59ae15a5
PH
2513 This exception will be thrown by FileDownloader objects if they detect
2514 multiple files would have to be downloaded to the same file on disk.
2515 """
2516 pass
d77c3dfd
FV
2517
2518
bf5b9d85 2519class PostProcessingError(YoutubeDLError):
59ae15a5 2520 """Post Processing exception.
d77c3dfd 2521
59ae15a5
PH
2522 This exception may be raised by PostProcessor's .run() method to
2523 indicate an error in the postprocessing task.
2524 """
5f6a1245 2525
7851b379 2526 def __init__(self, msg):
bf5b9d85 2527 super(PostProcessingError, self).__init__(msg)
7851b379 2528 self.msg = msg
d77c3dfd 2529
5f6a1245 2530
8b0d7497 2531class ExistingVideoReached(YoutubeDLError):
2532 """ --max-downloads limit has been reached. """
2533 pass
2534
2535
2536class RejectedVideoReached(YoutubeDLError):
2537 """ --max-downloads limit has been reached. """
2538 pass
2539
2540
51d9739f 2541class ThrottledDownload(YoutubeDLError):
2542 """ Download speed below --throttled-rate. """
2543 pass
2544
2545
bf5b9d85 2546class MaxDownloadsReached(YoutubeDLError):
59ae15a5
PH
2547 """ --max-downloads limit has been reached. """
2548 pass
d77c3dfd
FV
2549
2550
bf5b9d85 2551class UnavailableVideoError(YoutubeDLError):
59ae15a5 2552 """Unavailable Format exception.
d77c3dfd 2553
59ae15a5
PH
2554 This exception will be thrown when a video is requested
2555 in a format that is not available for that video.
2556 """
2557 pass
d77c3dfd
FV
2558
2559
bf5b9d85 2560class ContentTooShortError(YoutubeDLError):
59ae15a5 2561 """Content Too Short exception.
d77c3dfd 2562
59ae15a5
PH
2563 This exception may be raised by FileDownloader objects when a file they
2564 download is too small for what the server announced first, indicating
2565 the connection was probably interrupted.
2566 """
d77c3dfd 2567
59ae15a5 2568 def __init__(self, downloaded, expected):
bf5b9d85
PM
2569 super(ContentTooShortError, self).__init__(
2570 'Downloaded {0} bytes, expected {1} bytes'.format(downloaded, expected)
2571 )
2c7ed247 2572 # Both in bytes
59ae15a5
PH
2573 self.downloaded = downloaded
2574 self.expected = expected
d77c3dfd 2575
5f6a1245 2576
bf5b9d85 2577class XAttrMetadataError(YoutubeDLError):
efa97bdc
YCH
2578 def __init__(self, code=None, msg='Unknown error'):
2579 super(XAttrMetadataError, self).__init__(msg)
2580 self.code = code
bd264412 2581 self.msg = msg
efa97bdc
YCH
2582
2583 # Parsing code and msg
3089bc74 2584 if (self.code in (errno.ENOSPC, errno.EDQUOT)
a0566bbf 2585 or 'No space left' in self.msg or 'Disk quota exceeded' in self.msg):
efa97bdc
YCH
2586 self.reason = 'NO_SPACE'
2587 elif self.code == errno.E2BIG or 'Argument list too long' in self.msg:
2588 self.reason = 'VALUE_TOO_LONG'
2589 else:
2590 self.reason = 'NOT_SUPPORTED'
2591
2592
bf5b9d85 2593class XAttrUnavailableError(YoutubeDLError):
efa97bdc
YCH
2594 pass
2595
2596
c5a59d93 2597def _create_http_connection(ydl_handler, http_class, is_https, *args, **kwargs):
e5e78797
S
2598 # Working around python 2 bug (see http://bugs.python.org/issue17849) by limiting
2599 # expected HTTP responses to meet HTTP/1.0 or later (see also
067aa17e 2600 # https://github.com/ytdl-org/youtube-dl/issues/6727)
e5e78797 2601 if sys.version_info < (3, 0):
65220c3b
S
2602 kwargs['strict'] = True
2603 hc = http_class(*args, **compat_kwargs(kwargs))
be4a824d 2604 source_address = ydl_handler._params.get('source_address')
8959018a 2605
be4a824d 2606 if source_address is not None:
8959018a
AU
2607 # This is to work around _create_connection() from socket where it will try all
2608 # address data from getaddrinfo() including IPv6. This filters the result from
2609 # getaddrinfo() based on the source_address value.
2610 # This is based on the cpython socket.create_connection() function.
2611 # https://github.com/python/cpython/blob/master/Lib/socket.py#L691
2612 def _create_connection(address, timeout=socket._GLOBAL_DEFAULT_TIMEOUT, source_address=None):
2613 host, port = address
2614 err = None
2615 addrs = socket.getaddrinfo(host, port, 0, socket.SOCK_STREAM)
9e21e6d9
S
2616 af = socket.AF_INET if '.' in source_address[0] else socket.AF_INET6
2617 ip_addrs = [addr for addr in addrs if addr[0] == af]
2618 if addrs and not ip_addrs:
2619 ip_version = 'v4' if af == socket.AF_INET else 'v6'
2620 raise socket.error(
2621 "No remote IP%s addresses available for connect, can't use '%s' as source address"
2622 % (ip_version, source_address[0]))
8959018a
AU
2623 for res in ip_addrs:
2624 af, socktype, proto, canonname, sa = res
2625 sock = None
2626 try:
2627 sock = socket.socket(af, socktype, proto)
2628 if timeout is not socket._GLOBAL_DEFAULT_TIMEOUT:
2629 sock.settimeout(timeout)
2630 sock.bind(source_address)
2631 sock.connect(sa)
2632 err = None # Explicitly break reference cycle
2633 return sock
2634 except socket.error as _:
2635 err = _
2636 if sock is not None:
2637 sock.close()
2638 if err is not None:
2639 raise err
2640 else:
9e21e6d9
S
2641 raise socket.error('getaddrinfo returns an empty list')
2642 if hasattr(hc, '_create_connection'):
2643 hc._create_connection = _create_connection
be4a824d
PH
2644 sa = (source_address, 0)
2645 if hasattr(hc, 'source_address'): # Python 2.7+
2646 hc.source_address = sa
2647 else: # Python 2.6
2648 def _hc_connect(self, *args, **kwargs):
9e21e6d9 2649 sock = _create_connection(
be4a824d
PH
2650 (self.host, self.port), self.timeout, sa)
2651 if is_https:
d7932313
PH
2652 self.sock = ssl.wrap_socket(
2653 sock, self.key_file, self.cert_file,
2654 ssl_version=ssl.PROTOCOL_TLSv1)
be4a824d
PH
2655 else:
2656 self.sock = sock
2657 hc.connect = functools.partial(_hc_connect, hc)
2658
2659 return hc
2660
2661
87f0e62d 2662def handle_youtubedl_headers(headers):
992fc9d6
YCH
2663 filtered_headers = headers
2664
2665 if 'Youtubedl-no-compression' in filtered_headers:
2666 filtered_headers = dict((k, v) for k, v in filtered_headers.items() if k.lower() != 'accept-encoding')
87f0e62d 2667 del filtered_headers['Youtubedl-no-compression']
87f0e62d 2668
992fc9d6 2669 return filtered_headers
87f0e62d
YCH
2670
2671
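# Illustrative sketch (editor's addition): the internal marker header and any
# Accept-Encoding header are both dropped before the real request is made.
def _example_handle_youtubedl_headers():  # pragma: no cover - illustrative only
    headers = {
        'User-Agent': 'UA',
        'Accept-Encoding': 'gzip, deflate',
        'Youtubedl-no-compression': 'True',
    }
    assert handle_youtubedl_headers(headers) == {'User-Agent': 'UA'}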
acebc9cd 2672class YoutubeDLHandler(compat_urllib_request.HTTPHandler):
59ae15a5
PH
2673 """Handler for HTTP requests and responses.
2674
2675 This class, when installed with an OpenerDirector, automatically adds
2676 the standard headers to every HTTP request and handles gzipped and
2677 deflated responses from web servers. If compression is to be avoided in
2678 a particular request, the original request in the program code only has
0424ec30 2679 to include the HTTP header "Youtubedl-no-compression", which will be
59ae15a5
PH
2680 removed before making the real request.
2681
2682 Part of this code was copied from:
2683
2684 http://techknack.net/python-urllib2-handlers/
2685
2686 Andrew Rowls, the author of that code, agreed to release it to the
2687 public domain.
2688 """
2689
be4a824d
PH
2690 def __init__(self, params, *args, **kwargs):
2691 compat_urllib_request.HTTPHandler.__init__(self, *args, **kwargs)
2692 self._params = params
2693
2694 def http_open(self, req):
71aff188
YCH
2695 conn_class = compat_http_client.HTTPConnection
2696
2697 socks_proxy = req.headers.get('Ytdl-socks-proxy')
2698 if socks_proxy:
2699 conn_class = make_socks_conn_class(conn_class, socks_proxy)
2700 del req.headers['Ytdl-socks-proxy']
2701
be4a824d 2702 return self.do_open(functools.partial(
71aff188 2703 _create_http_connection, self, conn_class, False),
be4a824d
PH
2704 req)
2705
59ae15a5
PH
2706 @staticmethod
2707 def deflate(data):
fc2119f2 2708 if not data:
2709 return data
59ae15a5
PH
2710 try:
2711 return zlib.decompress(data, -zlib.MAX_WBITS)
2712 except zlib.error:
2713 return zlib.decompress(data)
2714
acebc9cd 2715 def http_request(self, req):
51f267d9
S
2716 # According to RFC 3986, URLs cannot contain non-ASCII characters; however, this is not
2717 # always respected by websites, and some give out URLs with non-percent-encoded
2718 # non-ASCII characters (see telemb.py, ard.py [#3412])
2719 # urllib chokes on URLs with non-ASCII characters (see http://bugs.python.org/issue3991)
2720 # To work around aforementioned issue we will replace request's original URL with
2721 # percent-encoded one
2722 # Since redirects are also affected (e.g. http://www.southpark.de/alle-episoden/s18e09)
2723 # the code of this workaround has been moved here from YoutubeDL.urlopen()
2724 url = req.get_full_url()
2725 url_escaped = escape_url(url)
2726
2727 # Substitute URL if any change after escaping
2728 if url != url_escaped:
15d260eb 2729 req = update_Request(req, url=url_escaped)
51f267d9 2730
33ac271b 2731 for h, v in std_headers.items():
3d5f7a39
JK
2732 # Capitalize is needed because of Python bug 2275: http://bugs.python.org/issue2275
2733 # The dict keys are capitalized because of this bug by urllib
2734 if h.capitalize() not in req.headers:
33ac271b 2735 req.add_header(h, v)
87f0e62d
YCH
2736
2737 req.headers = handle_youtubedl_headers(req.headers)
989b4b2b
PH
2738
2739 if sys.version_info < (2, 7) and '#' in req.get_full_url():
2740 # Python 2.6 is brain-dead when it comes to fragments
2741 req._Request__original = req._Request__original.partition('#')[0]
2742 req._Request__r_type = req._Request__r_type.partition('#')[0]
2743
59ae15a5
PH
2744 return req
2745
acebc9cd 2746 def http_response(self, req, resp):
59ae15a5
PH
2747 old_resp = resp
2748 # gzip
2749 if resp.headers.get('Content-encoding', '') == 'gzip':
aa3e9507
PH
2750 content = resp.read()
2751 gz = gzip.GzipFile(fileobj=io.BytesIO(content), mode='rb')
2752 try:
2753 uncompressed = io.BytesIO(gz.read())
2754 except IOError as original_ioerror:
2755 # There may be junk at the end of the file
2756 # See http://stackoverflow.com/q/4928560/35070 for details
2757 for i in range(1, 1024):
2758 try:
2759 gz = gzip.GzipFile(fileobj=io.BytesIO(content[:-i]), mode='rb')
2760 uncompressed = io.BytesIO(gz.read())
2761 except IOError:
2762 continue
2763 break
2764 else:
2765 raise original_ioerror
b407d853 2766 resp = compat_urllib_request.addinfourl(uncompressed, old_resp.headers, old_resp.url, old_resp.code)
59ae15a5 2767 resp.msg = old_resp.msg
c047270c 2768 del resp.headers['Content-encoding']
59ae15a5
PH
2769 # deflate
2770 if resp.headers.get('Content-encoding', '') == 'deflate':
2771 gz = io.BytesIO(self.deflate(resp.read()))
b407d853 2772 resp = compat_urllib_request.addinfourl(gz, old_resp.headers, old_resp.url, old_resp.code)
59ae15a5 2773 resp.msg = old_resp.msg
c047270c 2774 del resp.headers['Content-encoding']
ad729172 2775 # Percent-encode redirect URL of Location HTTP header to satisfy RFC 3986 (see
067aa17e 2776 # https://github.com/ytdl-org/youtube-dl/issues/6457).
5a4d9ddb
S
2777 if 300 <= resp.code < 400:
2778 location = resp.headers.get('Location')
2779 if location:
2780 # Per RFC 2616, the default charset is iso-8859-1, which Python 3 respects
2781 if sys.version_info >= (3, 0):
2782 location = location.encode('iso-8859-1').decode('utf-8')
0ea59007
YCH
2783 else:
2784 location = location.decode('utf-8')
5a4d9ddb
S
2785 location_escaped = escape_url(location)
2786 if location != location_escaped:
2787 del resp.headers['Location']
9a4aec8b
YCH
2788 if sys.version_info < (3, 0):
2789 location_escaped = location_escaped.encode('utf-8')
5a4d9ddb 2790 resp.headers['Location'] = location_escaped
59ae15a5 2791 return resp
0f8d03f8 2792
acebc9cd
PH
2793 https_request = http_request
2794 https_response = http_response
bf50b038 2795
5de90176 2796
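# Illustrative sketch (editor's addition): YoutubeDLHandler.deflate() accepts both a
# raw DEFLATE stream and a zlib-wrapped one, using the fallback path for the latter.
def _example_deflate():  # pragma: no cover - illustrative only
    compressor = zlib.compressobj(9, zlib.DEFLATED, -zlib.MAX_WBITS)
    raw_stream = compressor.compress(b'payload') + compressor.flush()
    assert YoutubeDLHandler.deflate(raw_stream) == b'payload'                 # raw DEFLATE
    assert YoutubeDLHandler.deflate(zlib.compress(b'payload')) == b'payload'  # zlib-wrapped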
71aff188
YCH
2797def make_socks_conn_class(base_class, socks_proxy):
2798 assert issubclass(base_class, (
2799 compat_http_client.HTTPConnection, compat_http_client.HTTPSConnection))
2800
2801 url_components = compat_urlparse.urlparse(socks_proxy)
2802 if url_components.scheme.lower() == 'socks5':
2803 socks_type = ProxyType.SOCKS5
2804 elif url_components.scheme.lower() in ('socks', 'socks4'):
2805 socks_type = ProxyType.SOCKS4
51fb4995
YCH
2806 elif url_components.scheme.lower() == 'socks4a':
2807 socks_type = ProxyType.SOCKS4A
71aff188 2808
cdd94c2e
YCH
2809 def unquote_if_non_empty(s):
2810 if not s:
2811 return s
2812 return compat_urllib_parse_unquote_plus(s)
2813
71aff188
YCH
2814 proxy_args = (
2815 socks_type,
2816 url_components.hostname, url_components.port or 1080,
2817 True, # Remote DNS
cdd94c2e
YCH
2818 unquote_if_non_empty(url_components.username),
2819 unquote_if_non_empty(url_components.password),
71aff188
YCH
2820 )
2821
2822 class SocksConnection(base_class):
2823 def connect(self):
2824 self.sock = sockssocket()
2825 self.sock.setproxy(*proxy_args)
2826 if type(self.timeout) in (int, float):
2827 self.sock.settimeout(self.timeout)
2828 self.sock.connect((self.host, self.port))
2829
2830 if isinstance(self, compat_http_client.HTTPSConnection):
2831 if hasattr(self, '_context'): # Python > 2.6
2832 self.sock = self._context.wrap_socket(
2833 self.sock, server_hostname=self.host)
2834 else:
2835 self.sock = ssl.wrap_socket(self.sock)
2836
2837 return SocksConnection
2838
2839
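# Illustrative sketch (editor's addition): wrap a plain HTTPConnection so that its
# traffic is tunnelled through a hypothetical SOCKS5 proxy; the username/password in
# the proxy URL are percent-decoded by unquote_if_non_empty() above.
def _example_socks_connection():  # pragma: no cover - illustrative only
    conn_class = make_socks_conn_class(
        compat_http_client.HTTPConnection, 'socks5://user:pass@127.0.0.1:1080')
    conn = conn_class('example.com', 80, timeout=20)
    conn.request('GET', '/')
    return conn.getresponse()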
be4a824d
PH
2840class YoutubeDLHTTPSHandler(compat_urllib_request.HTTPSHandler):
2841 def __init__(self, params, https_conn_class=None, *args, **kwargs):
2842 compat_urllib_request.HTTPSHandler.__init__(self, *args, **kwargs)
2843 self._https_conn_class = https_conn_class or compat_http_client.HTTPSConnection
2844 self._params = params
2845
2846 def https_open(self, req):
4f264c02 2847 kwargs = {}
71aff188
YCH
2848 conn_class = self._https_conn_class
2849
4f264c02
JMF
2850 if hasattr(self, '_context'): # python > 2.6
2851 kwargs['context'] = self._context
2852 if hasattr(self, '_check_hostname'): # python 3.x
2853 kwargs['check_hostname'] = self._check_hostname
71aff188
YCH
2854
2855 socks_proxy = req.headers.get('Ytdl-socks-proxy')
2856 if socks_proxy:
2857 conn_class = make_socks_conn_class(conn_class, socks_proxy)
2858 del req.headers['Ytdl-socks-proxy']
2859
be4a824d 2860 return self.do_open(functools.partial(
71aff188 2861 _create_http_connection, self, conn_class, True),
4f264c02 2862 req, **kwargs)
be4a824d
PH
2863
2864
1bab3437 2865class YoutubeDLCookieJar(compat_cookiejar.MozillaCookieJar):
f1a8511f
S
2866 """
2867 See [1] for cookie file format.
2868
2869 1. https://curl.haxx.se/docs/http-cookies.html
2870 """
e7e62441 2871 _HTTPONLY_PREFIX = '#HttpOnly_'
c380cc28
S
2872 _ENTRY_LEN = 7
2873 _HEADER = '''# Netscape HTTP Cookie File
7a5c1cfe 2874# This file is generated by yt-dlp. Do not edit.
c380cc28
S
2875
2876'''
2877 _CookieFileEntry = collections.namedtuple(
2878 'CookieFileEntry',
2879 ('domain_name', 'include_subdomains', 'path', 'https_only', 'expires_at', 'name', 'value'))
e7e62441 2880
1bab3437 2881 def save(self, filename=None, ignore_discard=False, ignore_expires=False):
c380cc28
S
2882 """
2883 Save cookies to a file.
2884
2885 Most of the code is taken from CPython 3.8 and slightly adapted
2886 to support cookie files with UTF-8 in both python 2 and 3.
2887 """
2888 if filename is None:
2889 if self.filename is not None:
2890 filename = self.filename
2891 else:
2892 raise ValueError(compat_cookiejar.MISSING_FILENAME_TEXT)
2893
1bab3437
S
2894 # Store session cookies with `expires` set to 0 instead of an empty
2895 # string
2896 for cookie in self:
2897 if cookie.expires is None:
2898 cookie.expires = 0
c380cc28
S
2899
2900 with io.open(filename, 'w', encoding='utf-8') as f:
2901 f.write(self._HEADER)
2902 now = time.time()
2903 for cookie in self:
2904 if not ignore_discard and cookie.discard:
2905 continue
2906 if not ignore_expires and cookie.is_expired(now):
2907 continue
2908 if cookie.secure:
2909 secure = 'TRUE'
2910 else:
2911 secure = 'FALSE'
2912 if cookie.domain.startswith('.'):
2913 initial_dot = 'TRUE'
2914 else:
2915 initial_dot = 'FALSE'
2916 if cookie.expires is not None:
2917 expires = compat_str(cookie.expires)
2918 else:
2919 expires = ''
2920 if cookie.value is None:
2921 # cookies.txt regards 'Set-Cookie: foo' as a cookie
2922 # with no name, whereas http.cookiejar regards it as a
2923 # cookie with no value.
2924 name = ''
2925 value = cookie.name
2926 else:
2927 name = cookie.name
2928 value = cookie.value
2929 f.write(
2930 '\t'.join([cookie.domain, initial_dot, cookie.path,
2931 secure, expires, name, value]) + '\n')
1bab3437
S
2932
2933 def load(self, filename=None, ignore_discard=False, ignore_expires=False):
e7e62441 2934 """Load cookies from a file."""
2935 if filename is None:
2936 if self.filename is not None:
2937 filename = self.filename
2938 else:
2939 raise ValueError(compat_cookiejar.MISSING_FILENAME_TEXT)
2940
c380cc28
S
2941 def prepare_line(line):
2942 if line.startswith(self._HTTPONLY_PREFIX):
2943 line = line[len(self._HTTPONLY_PREFIX):]
2944 # comments and empty lines are fine
2945 if line.startswith('#') or not line.strip():
2946 return line
2947 cookie_list = line.split('\t')
2948 if len(cookie_list) != self._ENTRY_LEN:
2949 raise compat_cookiejar.LoadError('invalid length %d' % len(cookie_list))
2950 cookie = self._CookieFileEntry(*cookie_list)
2951 if cookie.expires_at and not cookie.expires_at.isdigit():
2952 raise compat_cookiejar.LoadError('invalid expires at %s' % cookie.expires_at)
2953 return line
2954
e7e62441 2955 cf = io.StringIO()
c380cc28 2956 with io.open(filename, encoding='utf-8') as f:
e7e62441 2957 for line in f:
c380cc28
S
2958 try:
2959 cf.write(prepare_line(line))
2960 except compat_cookiejar.LoadError as e:
2961 write_string(
2962 'WARNING: skipping cookie file entry due to %s: %r\n'
2963 % (e, line), sys.stderr)
2964 continue
e7e62441 2965 cf.seek(0)
2966 self._really_load(cf, filename, ignore_discard, ignore_expires)
1bab3437
S
2967 # Session cookies are denoted by either `expires` field set to
2968 # an empty string or 0. MozillaCookieJar only recognizes the former
2969 # (see [1]). So we need to force the latter to be recognized as session
2970 # cookies on our own.
2971 # Session cookies may be important for cookies-based authentication,
2972 # e.g. usually, when user does not check 'Remember me' check box while
2973 # logging in on a site, some important cookies are stored as session
2974 # cookies so that not recognizing them will result in failed login.
2975 # 1. https://bugs.python.org/issue17164
2976 for cookie in self:
2977 # Treat `expires=0` cookies as session cookies
2978 if cookie.expires == 0:
2979 cookie.expires = None
2980 cookie.discard = True
2981
2982
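# Illustrative sketch (editor's addition): round-trip a hypothetical Netscape-format
# cookie file; session cookies and '#HttpOnly_'-prefixed entries survive the round trip.
def _example_cookiejar_roundtrip():  # pragma: no cover - illustrative only
    jar = YoutubeDLCookieJar('cookies.txt')  # hypothetical file name
    jar.load(ignore_discard=True, ignore_expires=True)
    # ... hand the jar to a cookie-processing opener, perform requests ...
    jar.save(ignore_discard=True, ignore_expires=True)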
a6420bf5
S
2983class YoutubeDLCookieProcessor(compat_urllib_request.HTTPCookieProcessor):
2984 def __init__(self, cookiejar=None):
2985 compat_urllib_request.HTTPCookieProcessor.__init__(self, cookiejar)
2986
2987 def http_response(self, request, response):
2988 # Python 2 will choke on next HTTP request in row if there are non-ASCII
2989 # characters in Set-Cookie HTTP header of last response (see
067aa17e 2990 # https://github.com/ytdl-org/youtube-dl/issues/6769).
a6420bf5
S
2991 # In order to at least prevent crashing we will percent encode Set-Cookie
2992 # header before HTTPCookieProcessor starts processing it.
e28034c5
S
2993 # if sys.version_info < (3, 0) and response.headers:
2994 # for set_cookie_header in ('Set-Cookie', 'Set-Cookie2'):
2995 # set_cookie = response.headers.get(set_cookie_header)
2996 # if set_cookie:
2997 # set_cookie_escaped = compat_urllib_parse.quote(set_cookie, b"%/;:@&=+$,!~*'()?#[] ")
2998 # if set_cookie != set_cookie_escaped:
2999 # del response.headers[set_cookie_header]
3000 # response.headers[set_cookie_header] = set_cookie_escaped
a6420bf5
S
3001 return compat_urllib_request.HTTPCookieProcessor.http_response(self, request, response)
3002
f5fa042c 3003 https_request = compat_urllib_request.HTTPCookieProcessor.http_request
a6420bf5
S
3004 https_response = http_response
3005
3006
fca6dba8 3007class YoutubeDLRedirectHandler(compat_urllib_request.HTTPRedirectHandler):
201c1459 3008 """YoutubeDL redirect handler
3009
3010 The code is based on HTTPRedirectHandler implementation from CPython [1].
3011
3012 This redirect handler solves two issues:
3013 - ensures redirect URL is always unicode under python 2
3014 - introduces support for experimental HTTP response status code
3015 308 Permanent Redirect [2] used by some sites [3]
3016
3017 1. https://github.com/python/cpython/blob/master/Lib/urllib/request.py
3018 2. https://developer.mozilla.org/en-US/docs/Web/HTTP/Status/308
3019 3. https://github.com/ytdl-org/youtube-dl/issues/28768
3020 """
3021
3022 http_error_301 = http_error_303 = http_error_307 = http_error_308 = compat_urllib_request.HTTPRedirectHandler.http_error_302
3023
3024 def redirect_request(self, req, fp, code, msg, headers, newurl):
3025 """Return a Request or None in response to a redirect.
3026
3027 This is called by the http_error_30x methods when a
3028 redirection response is received. If a redirection should
3029 take place, return a new Request to allow http_error_30x to
3030 perform the redirect. Otherwise, raise HTTPError if no-one
3031 else should try to handle this url. Return None if you can't
3032 but another Handler might.
3033 """
3034 m = req.get_method()
3035 if (not (code in (301, 302, 303, 307, 308) and m in ("GET", "HEAD")
3036 or code in (301, 302, 303) and m == "POST")):
3037 raise compat_HTTPError(req.full_url, code, msg, headers, fp)
3038 # Strictly (according to RFC 2616), 301 or 302 in response to
3039 # a POST MUST NOT cause a redirection without confirmation
3040 # from the user (of urllib.request, in this case). In practice,
3041 # essentially all clients do redirect in this case, so we do
3042 # the same.
3043
3044 # On python 2 urlh.geturl() may sometimes return redirect URL
3045 # as byte string instead of unicode. This workaround allows
3046 # to force it always return unicode.
3047 if sys.version_info[0] < 3:
3048 newurl = compat_str(newurl)
3049
3050 # Be conciliant with URIs containing a space. This is mainly
3051 # redundant with the more complete encoding done in http_error_302(),
3052 # but it is kept for compatibility with other callers.
3053 newurl = newurl.replace(' ', '%20')
3054
3055 CONTENT_HEADERS = ("content-length", "content-type")
3056 # NB: don't use dict comprehension for python 2.6 compatibility
3057 newheaders = dict((k, v) for k, v in req.headers.items()
3058 if k.lower() not in CONTENT_HEADERS)
3059 return compat_urllib_request.Request(
3060 newurl, headers=newheaders, origin_req_host=req.origin_req_host,
3061 unverifiable=True)
fca6dba8
S
3062
3063
46f59e89
S
3064def extract_timezone(date_str):
3065 m = re.search(
f137e4c2 3066 r'''(?x)
3067 ^.{8,}? # >=8 char non-TZ prefix, if present
3068 (?P<tz>Z| # just the UTC Z, or
3069 (?:(?<=.\b\d{4}|\b\d{2}:\d\d)| # preceded by 4 digits or hh:mm or
3070 (?<!.\b[a-zA-Z]{3}|[a-zA-Z]{4}|..\b\d\d)) # not preceded by 3 alpha word or >= 4 alpha or 2 digits
3071 [ ]? # optional space
3072 (?P<sign>\+|-) # +/-
3073 (?P<hours>[0-9]{2}):?(?P<minutes>[0-9]{2}) # hh[:]mm
3074 $)
3075 ''', date_str)
46f59e89
S
3076 if not m:
3077 timezone = datetime.timedelta()
3078 else:
3079 date_str = date_str[:-len(m.group('tz'))]
3080 if not m.group('sign'):
3081 timezone = datetime.timedelta()
3082 else:
3083 sign = 1 if m.group('sign') == '+' else -1
3084 timezone = datetime.timedelta(
3085 hours=sign * int(m.group('hours')),
3086 minutes=sign * int(m.group('minutes')))
3087 return timezone, date_str
3088
3089
08b38d54 3090def parse_iso8601(date_str, delimiter='T', timezone=None):
912b38b4
PH
3091 """ Return a UNIX timestamp from the given date """
3092
3093 if date_str is None:
3094 return None
3095
52c3a6e4
S
3096 date_str = re.sub(r'\.[0-9]+', '', date_str)
3097
08b38d54 3098 if timezone is None:
46f59e89
S
3099 timezone, date_str = extract_timezone(date_str)
3100
52c3a6e4
S
3101 try:
3102 date_format = '%Y-%m-%d{0}%H:%M:%S'.format(delimiter)
3103 dt = datetime.datetime.strptime(date_str, date_format) - timezone
3104 return calendar.timegm(dt.timetuple())
3105 except ValueError:
3106 pass
912b38b4
PH
3107
3108
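# Illustrative sketch (editor's addition): splitting off an explicit UTC offset and
# converting the remainder to a UNIX timestamp (12:00 at UTC+05:30 is 06:30 UTC).
def _example_parse_iso8601():  # pragma: no cover - illustrative only
    tz, remainder = extract_timezone('2021-01-01T12:00:00+05:30')
    assert tz == datetime.timedelta(hours=5, minutes=30)
    assert remainder == '2021-01-01T12:00:00'
    assert parse_iso8601('2021-01-01T12:00:00+05:30') == 1609482600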
46f59e89
S
3109def date_formats(day_first=True):
3110 return DATE_FORMATS_DAY_FIRST if day_first else DATE_FORMATS_MONTH_FIRST
3111
3112
42bdd9d0 3113def unified_strdate(date_str, day_first=True):
bf50b038 3114 """Return a string with the date in the format YYYYMMDD"""
64e7ad60
PH
3115
3116 if date_str is None:
3117 return None
bf50b038 3118 upload_date = None
5f6a1245 3119 # Replace commas
026fcc04 3120 date_str = date_str.replace(',', ' ')
42bdd9d0 3121 # Remove AM/PM + timezone
9bb8e0a3 3122 date_str = re.sub(r'(?i)\s*(?:AM|PM)(?:\s+[A-Z]+)?', '', date_str)
46f59e89 3123 _, date_str = extract_timezone(date_str)
42bdd9d0 3124
46f59e89 3125 for expression in date_formats(day_first):
bf50b038
JMF
3126 try:
3127 upload_date = datetime.datetime.strptime(date_str, expression).strftime('%Y%m%d')
5de90176 3128 except ValueError:
bf50b038 3129 pass
42393ce2
PH
3130 if upload_date is None:
3131 timetuple = email.utils.parsedate_tz(date_str)
3132 if timetuple:
c6b9cf05
S
3133 try:
3134 upload_date = datetime.datetime(*timetuple[:6]).strftime('%Y%m%d')
3135 except ValueError:
3136 pass
6a750402
JMF
3137 if upload_date is not None:
3138 return compat_str(upload_date)
bf50b038 3139
5f6a1245 3140
46f59e89
S
3141def unified_timestamp(date_str, day_first=True):
3142 if date_str is None:
3143 return None
3144
2ae2ffda 3145 date_str = re.sub(r'[,|]', '', date_str)
46f59e89 3146
7dc2a74e 3147 pm_delta = 12 if re.search(r'(?i)PM', date_str) else 0
46f59e89
S
3148 timezone, date_str = extract_timezone(date_str)
3149
3150 # Remove AM/PM + timezone
3151 date_str = re.sub(r'(?i)\s*(?:AM|PM)(?:\s+[A-Z]+)?', '', date_str)
3152
deef3195
S
3153 # Remove unrecognized timezones from ISO 8601 alike timestamps
3154 m = re.search(r'\d{1,2}:\d{1,2}(?:\.\d+)?(?P<tz>\s*[A-Z]+)$', date_str)
3155 if m:
3156 date_str = date_str[:-len(m.group('tz'))]
3157
f226880c
PH
3158 # Python only supports microseconds, so remove nanoseconds
3159 m = re.search(r'^([0-9]{4,}-[0-9]{1,2}-[0-9]{1,2}T[0-9]{1,2}:[0-9]{1,2}:[0-9]{1,2}\.[0-9]{6})[0-9]+$', date_str)
3160 if m:
3161 date_str = m.group(1)
3162
46f59e89
S
3163 for expression in date_formats(day_first):
3164 try:
7dc2a74e 3165 dt = datetime.datetime.strptime(date_str, expression) - timezone + datetime.timedelta(hours=pm_delta)
46f59e89
S
3166 return calendar.timegm(dt.timetuple())
3167 except ValueError:
3168 pass
3169 timetuple = email.utils.parsedate_tz(date_str)
3170 if timetuple:
7dc2a74e 3171 return calendar.timegm(timetuple) + pm_delta * 3600
46f59e89
S
3172
3173
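# Illustrative sketch (editor's addition), assuming '%B %d %Y' is among the
# DATE_FORMATS defined elsewhere in this module:
def _example_unified_dates():  # pragma: no cover - illustrative only
    assert unified_strdate('December 21, 2014') == '20141221'
    assert unified_timestamp('January 1 2021') == 1609459200  # midnight UTC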
28e614de 3174def determine_ext(url, default_ext='unknown_video'):
85750f89 3175 if url is None or '.' not in url:
f4776371 3176 return default_ext
9cb9a5df 3177 guess = url.partition('?')[0].rpartition('.')[2]
73e79f2a
PH
3178 if re.match(r'^[A-Za-z0-9]+$', guess):
3179 return guess
a7aaa398
S
3180 # Try extract ext from URLs like http://example.com/foo/bar.mp4/?download
3181 elif guess.rstrip('/') in KNOWN_EXTENSIONS:
9cb9a5df 3182 return guess.rstrip('/')
73e79f2a 3183 else:
cbdbb766 3184 return default_ext
73e79f2a 3185
5f6a1245 3186
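# Illustrative sketch (editor's addition), assuming 'mp4' is listed in
# KNOWN_EXTENSIONS (defined elsewhere in this module):
def _example_determine_ext():  # pragma: no cover - illustrative only
    assert determine_ext('http://example.com/video.mp4?download=1') == 'mp4'
    assert determine_ext('http://example.com/video.mp4/?download') == 'mp4'
    assert determine_ext('http://example.com/stream') == 'unknown_video'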
824fa511
S
3187def subtitles_filename(filename, sub_lang, sub_format, expected_real_ext=None):
3188 return replace_extension(filename, sub_lang + '.' + sub_format, expected_real_ext)
d4051a8e 3189
5f6a1245 3190
9e62f283 3191def datetime_from_str(date_str, precision='auto', format='%Y%m%d'):
37254abc
JMF
3192 """
3193 Return a datetime object from a string in the format YYYYMMDD or
9e62f283 3194 (now|today|date)[+-][0-9](microsecond|second|minute|hour|day|week|month|year)(s)?
3195
3196 format: date format string used to parse date_str
3197 precision: round the time portion of a datetime object.
3198 auto|microsecond|second|minute|hour|day.
3199 auto: round to the unit provided in date_str (if applicable).
3200 """
3201 auto_precision = False
3202 if precision == 'auto':
3203 auto_precision = True
3204 precision = 'microsecond'
3205 today = datetime_round(datetime.datetime.now(), precision)
f8795e10 3206 if date_str in ('now', 'today'):
37254abc 3207 return today
f8795e10
PH
3208 if date_str == 'yesterday':
3209 return today - datetime.timedelta(days=1)
9e62f283 3210 match = re.match(
3211 r'(?P<start>.+)(?P<sign>[+-])(?P<time>\d+)(?P<unit>microsecond|second|minute|hour|day|week|month|year)(s)?',
3212 date_str)
37254abc 3213 if match is not None:
9e62f283 3214 start_time = datetime_from_str(match.group('start'), precision, format)
3215 time = int(match.group('time')) * (-1 if match.group('sign') == '-' else 1)
37254abc 3216 unit = match.group('unit')
9e62f283 3217 if unit == 'month' or unit == 'year':
3218 new_date = datetime_add_months(start_time, time * 12 if unit == 'year' else time)
37254abc 3219 unit = 'day'
9e62f283 3220 else:
3221 if unit == 'week':
3222 unit = 'day'
3223 time *= 7
3224 delta = datetime.timedelta(**{unit + 's': time})
3225 new_date = start_time + delta
3226 if auto_precision:
3227 return datetime_round(new_date, unit)
3228 return new_date
3229
3230 return datetime_round(datetime.datetime.strptime(date_str, format), precision)
3231
3232
3233def date_from_str(date_str, format='%Y%m%d'):
3234 """
3235 Return a datetime object from a string in the format YYYYMMDD or
3236 (now|today|date)[+-][0-9](microsecond|second|minute|hour|day|week|month|year)(s)?
3237
3238 format: date format string used to parse date_str
3239 """
3240 return datetime_from_str(date_str, precision='microsecond', format=format).date()
3241
3242
3243def datetime_add_months(dt, months):
3244 """Increment/Decrement a datetime object by months."""
3245 month = dt.month + months - 1
3246 year = dt.year + month // 12
3247 month = month % 12 + 1
3248 day = min(dt.day, calendar.monthrange(year, month)[1])
3249 return dt.replace(year, month, day)
3250
3251
3252def datetime_round(dt, precision='day'):
3253 """
3254 Round a datetime object's time to a specific precision
3255 """
3256 if precision == 'microsecond':
3257 return dt
3258
3259 unit_seconds = {
3260 'day': 86400,
3261 'hour': 3600,
3262 'minute': 60,
3263 'second': 1,
3264 }
3265 roundto = lambda x, n: ((x + n / 2) // n) * n
3266 timestamp = calendar.timegm(dt.timetuple())
3267 return datetime.datetime.utcfromtimestamp(roundto(timestamp, unit_seconds[precision]))
5f6a1245
JW
3268
3269
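# Illustrative sketch (editor's addition): fixed-date checks for the helpers above.
# Relative forms such as 'now-1week' or 'today+3days' are also accepted by
# datetime_from_str()/date_from_str(), but those depend on the current clock.
def _example_datetime_helpers():  # pragma: no cover - illustrative only
    assert date_from_str('20210101') == datetime.date(2021, 1, 1)
    assert datetime_add_months(datetime.datetime(2020, 1, 31), 1) == datetime.datetime(2020, 2, 29)
    assert datetime_round(datetime.datetime(2021, 1, 1, 15, 30), 'hour') == datetime.datetime(2021, 1, 1, 16)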
e63fc1be 3270def hyphenate_date(date_str):
3271 """
3272 Convert a date in 'YYYYMMDD' format to 'YYYY-MM-DD' format"""
3273 match = re.match(r'^(\d\d\d\d)(\d\d)(\d\d)$', date_str)
3274 if match is not None:
3275 return '-'.join(match.groups())
3276 else:
3277 return date_str
3278
5f6a1245 3279
bd558525
JMF
3280class DateRange(object):
3281 """Represents a time interval between two dates"""
5f6a1245 3282
bd558525
JMF
3283 def __init__(self, start=None, end=None):
3284 """start and end must be strings in the format accepted by date"""
3285 if start is not None:
3286 self.start = date_from_str(start)
3287 else:
3288 self.start = datetime.datetime.min.date()
3289 if end is not None:
3290 self.end = date_from_str(end)
3291 else:
3292 self.end = datetime.datetime.max.date()
37254abc 3293 if self.start > self.end:
bd558525 3294 raise ValueError('Date range: "%s" , the start date must be before the end date' % self)
5f6a1245 3295
bd558525
JMF
3296 @classmethod
3297 def day(cls, day):
3298 """Returns a range that only contains the given day"""
5f6a1245
JW
3299 return cls(day, day)
3300
bd558525
JMF
3301 def __contains__(self, date):
3302 """Check if the date is in the range"""
37254abc
JMF
3303 if not isinstance(date, datetime.date):
3304 date = date_from_str(date)
3305 return self.start <= date <= self.end
5f6a1245 3306
bd558525 3307 def __str__(self):
5f6a1245 3308 return '%s - %s' % (self.start.isoformat(), self.end.isoformat())
c496ca96
PH
3309
3310
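# Illustrative sketch (editor's addition): membership tests accept both date objects
# and the string forms understood by date_from_str().
def _example_daterange():  # pragma: no cover - illustrative only
    january = DateRange('20210101', '20210131')
    assert '20210115' in january
    assert datetime.date(2021, 2, 1) not in january
    assert str(DateRange.day('20210101')) == '2021-01-01 - 2021-01-01'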
3311def platform_name():
3312 """ Returns the platform name as a compat_str """
3313 res = platform.platform()
3314 if isinstance(res, bytes):
3315 res = res.decode(preferredencoding())
3316
3317 assert isinstance(res, compat_str)
3318 return res
c257baff
PH
3319
3320
49fa4d9a
N
3321def get_windows_version():
3322 ''' Get Windows version. None if it's not running on Windows '''
3323 if compat_os_name == 'nt':
3324 return version_tuple(platform.win32_ver()[1])
3325 else:
3326 return None
3327
3328
b58ddb32
PH
3329def _windows_write_string(s, out):
3330 """ Returns True if the string was written using special methods,
3331 False if it has yet to be written out."""
3332 # Adapted from http://stackoverflow.com/a/3259271/35070
3333
3334 import ctypes
3335 import ctypes.wintypes
3336
3337 WIN_OUTPUT_IDS = {
3338 1: -11,
3339 2: -12,
3340 }
3341
a383a98a
PH
3342 try:
3343 fileno = out.fileno()
3344 except AttributeError:
3345 # If the output stream doesn't have a fileno, it's virtual
3346 return False
aa42e873
PH
3347 except io.UnsupportedOperation:
3348 # Some strange Windows pseudo files?
3349 return False
b58ddb32
PH
3350 if fileno not in WIN_OUTPUT_IDS:
3351 return False
3352
d7cd9a9e 3353 GetStdHandle = compat_ctypes_WINFUNCTYPE(
b58ddb32 3354 ctypes.wintypes.HANDLE, ctypes.wintypes.DWORD)(
d7cd9a9e 3355 ('GetStdHandle', ctypes.windll.kernel32))
b58ddb32
PH
3356 h = GetStdHandle(WIN_OUTPUT_IDS[fileno])
3357
d7cd9a9e 3358 WriteConsoleW = compat_ctypes_WINFUNCTYPE(
b58ddb32
PH
3359 ctypes.wintypes.BOOL, ctypes.wintypes.HANDLE, ctypes.wintypes.LPWSTR,
3360 ctypes.wintypes.DWORD, ctypes.POINTER(ctypes.wintypes.DWORD),
d7cd9a9e 3361 ctypes.wintypes.LPVOID)(('WriteConsoleW', ctypes.windll.kernel32))
b58ddb32
PH
3362 written = ctypes.wintypes.DWORD(0)
3363
d7cd9a9e 3364 GetFileType = compat_ctypes_WINFUNCTYPE(ctypes.wintypes.DWORD, ctypes.wintypes.DWORD)(('GetFileType', ctypes.windll.kernel32))
b58ddb32
PH
3365 FILE_TYPE_CHAR = 0x0002
3366 FILE_TYPE_REMOTE = 0x8000
d7cd9a9e 3367 GetConsoleMode = compat_ctypes_WINFUNCTYPE(
b58ddb32
PH
3368 ctypes.wintypes.BOOL, ctypes.wintypes.HANDLE,
3369 ctypes.POINTER(ctypes.wintypes.DWORD))(
d7cd9a9e 3370 ('GetConsoleMode', ctypes.windll.kernel32))
b58ddb32
PH
3371 INVALID_HANDLE_VALUE = ctypes.wintypes.DWORD(-1).value
3372
3373 def not_a_console(handle):
3374 if handle == INVALID_HANDLE_VALUE or handle is None:
3375 return True
3089bc74
S
3376 return ((GetFileType(handle) & ~FILE_TYPE_REMOTE) != FILE_TYPE_CHAR
3377 or GetConsoleMode(handle, ctypes.byref(ctypes.wintypes.DWORD())) == 0)
b58ddb32
PH
3378
3379 if not_a_console(h):
3380 return False
3381
d1b9c912
PH
3382 def next_nonbmp_pos(s):
3383 try:
3384 return next(i for i, c in enumerate(s) if ord(c) > 0xffff)
3385 except StopIteration:
3386 return len(s)
3387
3388 while s:
3389 count = min(next_nonbmp_pos(s), 1024)
3390
b58ddb32 3391 ret = WriteConsoleW(
d1b9c912 3392 h, s, count if count else 2, ctypes.byref(written), None)
b58ddb32
PH
3393 if ret == 0:
3394 raise OSError('Failed to write string')
d1b9c912
PH
3395 if not count: # We just wrote a non-BMP character
3396 assert written.value == 2
3397 s = s[1:]
3398 else:
3399 assert written.value > 0
3400 s = s[written.value:]
b58ddb32
PH
3401 return True
3402
3403
734f90bb 3404def write_string(s, out=None, encoding=None):
7459e3a2
PH
3405 if out is None:
3406 out = sys.stderr
8bf48f23 3407 assert type(s) == compat_str
7459e3a2 3408
b58ddb32
PH
3409 if sys.platform == 'win32' and encoding is None and hasattr(out, 'fileno'):
3410 if _windows_write_string(s, out):
3411 return
3412
3089bc74
S
3413 if ('b' in getattr(out, 'mode', '')
3414 or sys.version_info[0] < 3): # Python 2 lies about mode of sys.stderr
104aa738
PH
3415 byt = s.encode(encoding or preferredencoding(), 'ignore')
3416 out.write(byt)
3417 elif hasattr(out, 'buffer'):
3418 enc = encoding or getattr(out, 'encoding', None) or preferredencoding()
3419 byt = s.encode(enc, 'ignore')
3420 out.buffer.write(byt)
3421 else:
8bf48f23 3422 out.write(s)
7459e3a2
PH
3423 out.flush()
3424
3425
48ea9cea
PH
3426def bytes_to_intlist(bs):
3427 if not bs:
3428 return []
3429 if isinstance(bs[0], int): # Python 3
3430 return list(bs)
3431 else:
3432 return [ord(c) for c in bs]
3433
c257baff 3434
cba892fa 3435def intlist_to_bytes(xs):
3436 if not xs:
3437 return b''
edaa23f8 3438 return compat_struct_pack('%dB' % len(xs), *xs)
c38b1e77
PH
3439
3440
c1c9a79c
PH
3441# Cross-platform file locking
3442if sys.platform == 'win32':
3443 import ctypes.wintypes
3444 import msvcrt
3445
3446 class OVERLAPPED(ctypes.Structure):
3447 _fields_ = [
3448 ('Internal', ctypes.wintypes.LPVOID),
3449 ('InternalHigh', ctypes.wintypes.LPVOID),
3450 ('Offset', ctypes.wintypes.DWORD),
3451 ('OffsetHigh', ctypes.wintypes.DWORD),
3452 ('hEvent', ctypes.wintypes.HANDLE),
3453 ]
3454
3455 kernel32 = ctypes.windll.kernel32
3456 LockFileEx = kernel32.LockFileEx
3457 LockFileEx.argtypes = [
3458 ctypes.wintypes.HANDLE, # hFile
3459 ctypes.wintypes.DWORD, # dwFlags
3460 ctypes.wintypes.DWORD, # dwReserved
3461 ctypes.wintypes.DWORD, # nNumberOfBytesToLockLow
3462 ctypes.wintypes.DWORD, # nNumberOfBytesToLockHigh
3463 ctypes.POINTER(OVERLAPPED) # Overlapped
3464 ]
3465 LockFileEx.restype = ctypes.wintypes.BOOL
3466 UnlockFileEx = kernel32.UnlockFileEx
3467 UnlockFileEx.argtypes = [
3468 ctypes.wintypes.HANDLE, # hFile
3469 ctypes.wintypes.DWORD, # dwReserved
3470 ctypes.wintypes.DWORD, # nNumberOfBytesToLockLow
3471 ctypes.wintypes.DWORD, # nNumberOfBytesToLockHigh
3472 ctypes.POINTER(OVERLAPPED) # Overlapped
3473 ]
3474 UnlockFileEx.restype = ctypes.wintypes.BOOL
3475 whole_low = 0xffffffff
3476 whole_high = 0x7fffffff
3477
3478 def _lock_file(f, exclusive):
3479 overlapped = OVERLAPPED()
3480 overlapped.Offset = 0
3481 overlapped.OffsetHigh = 0
3482 overlapped.hEvent = 0
3483 f._lock_file_overlapped_p = ctypes.pointer(overlapped)
3484 handle = msvcrt.get_osfhandle(f.fileno())
3485 if not LockFileEx(handle, 0x2 if exclusive else 0x0, 0,
3486 whole_low, whole_high, f._lock_file_overlapped_p):
3487 raise OSError('Locking file failed: %r' % ctypes.FormatError())
3488
3489 def _unlock_file(f):
3490 assert f._lock_file_overlapped_p
3491 handle = msvcrt.get_osfhandle(f.fileno())
3492 if not UnlockFileEx(handle, 0,
3493 whole_low, whole_high, f._lock_file_overlapped_p):
3494 raise OSError('Unlocking file failed: %r' % ctypes.FormatError())
3495
3496else:
399a76e6
YCH
3497 # Some platforms, such as Jython, are missing fcntl
3498 try:
3499 import fcntl
c1c9a79c 3500
399a76e6
YCH
3501 def _lock_file(f, exclusive):
3502 fcntl.flock(f, fcntl.LOCK_EX if exclusive else fcntl.LOCK_SH)
c1c9a79c 3503
399a76e6
YCH
3504 def _unlock_file(f):
3505 fcntl.flock(f, fcntl.LOCK_UN)
3506 except ImportError:
3507 UNSUPPORTED_MSG = 'file locking is not supported on this platform'
3508
3509 def _lock_file(f, exclusive):
3510 raise IOError(UNSUPPORTED_MSG)
3511
3512 def _unlock_file(f):
3513 raise IOError(UNSUPPORTED_MSG)
c1c9a79c
PH
3514
3515
3516class locked_file(object):
3517 def __init__(self, filename, mode, encoding=None):
3518 assert mode in ['r', 'a', 'w']
3519 self.f = io.open(filename, mode, encoding=encoding)
3520 self.mode = mode
3521
3522 def __enter__(self):
3523 exclusive = self.mode != 'r'
3524 try:
3525 _lock_file(self.f, exclusive)
3526 except IOError:
3527 self.f.close()
3528 raise
3529 return self
3530
3531 def __exit__(self, etype, value, traceback):
3532 try:
3533 _unlock_file(self.f)
3534 finally:
3535 self.f.close()
3536
3537 def __iter__(self):
3538 return iter(self.f)
3539
3540 def write(self, *args):
3541 return self.f.write(*args)
3542
3543 def read(self, *args):
3544 return self.f.read(*args)
4eb7f1d1
JMF
3545
3546
4644ac55
S
3547def get_filesystem_encoding():
3548 encoding = sys.getfilesystemencoding()
3549 return encoding if encoding is not None else 'utf-8'
3550
3551
4eb7f1d1 3552def shell_quote(args):
a6a173c2 3553 quoted_args = []
4644ac55 3554 encoding = get_filesystem_encoding()
a6a173c2
JMF
3555 for a in args:
3556 if isinstance(a, bytes):
3557 # We may get a filename encoded with 'encodeFilename'
3558 a = a.decode(encoding)
aefce8e6 3559 quoted_args.append(compat_shlex_quote(a))
28e614de 3560 return ' '.join(quoted_args)
9d4660ca
PH
3561
3562
3563def smuggle_url(url, data):
3564 """ Pass additional data in a URL for internal use. """
3565
81953d1a
RA
3566 url, idata = unsmuggle_url(url, {})
3567 data.update(idata)
15707c7e 3568 sdata = compat_urllib_parse_urlencode(
28e614de
PH
3569 {'__youtubedl_smuggle': json.dumps(data)})
3570 return url + '#' + sdata
9d4660ca
PH
3571
3572
79f82953 3573def unsmuggle_url(smug_url, default=None):
83e865a3 3574 if '#__youtubedl_smuggle' not in smug_url:
79f82953 3575 return smug_url, default
28e614de
PH
3576 url, _, sdata = smug_url.rpartition('#')
3577 jsond = compat_parse_qs(sdata)['__youtubedl_smuggle'][0]
9d4660ca
PH
3578 data = json.loads(jsond)
3579 return url, data
02dbf93f
PH
3580
3581
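# Illustrative sketch (editor's addition): data smuggled into the URL fragment
# round-trips losslessly through unsmuggle_url().
def _example_smuggle_roundtrip():  # pragma: no cover - illustrative only
    smug = smuggle_url('https://example.com/video', {'referer': 'https://example.com/'})
    assert smug.startswith('https://example.com/video#__youtubedl_smuggle=')
    assert unsmuggle_url(smug) == ('https://example.com/video', {'referer': 'https://example.com/'})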
02dbf93f
PH
3582def format_bytes(bytes):
3583 if bytes is None:
28e614de 3584 return 'N/A'
02dbf93f
PH
3585 if type(bytes) is str:
3586 bytes = float(bytes)
3587 if bytes == 0.0:
3588 exponent = 0
3589 else:
3590 exponent = int(math.log(bytes, 1024.0))
28e614de 3591 suffix = ['B', 'KiB', 'MiB', 'GiB', 'TiB', 'PiB', 'EiB', 'ZiB', 'YiB'][exponent]
02dbf93f 3592 converted = float(bytes) / float(1024 ** exponent)
28e614de 3593 return '%.2f%s' % (converted, suffix)
f53c966a 3594
1c088fa8 3595
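# Illustrative sketch (editor's addition): sizes are rendered with binary
# (1024-based) prefixes and two decimals.
def _example_format_bytes():  # pragma: no cover - illustrative only
    assert format_bytes(None) == 'N/A'
    assert format_bytes(1536) == '1.50KiB'
    assert format_bytes(10 * 1024 ** 3) == '10.00GiB'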
fb47597b
S
3596def lookup_unit_table(unit_table, s):
3597 units_re = '|'.join(re.escape(u) for u in unit_table)
3598 m = re.match(
782b1b5b 3599 r'(?P<num>[0-9]+(?:[,.][0-9]*)?)\s*(?P<unit>%s)\b' % units_re, s)
fb47597b
S
3600 if not m:
3601 return None
3602 num_str = m.group('num').replace(',', '.')
3603 mult = unit_table[m.group('unit')]
3604 return int(float(num_str) * mult)
3605
3606
be64b5b0
PH
3607def parse_filesize(s):
3608 if s is None:
3609 return None
3610
dfb1b146 3611 # The lower-case forms are of course incorrect and unofficial,
be64b5b0
PH
3612 # but we support those too
3613 _UNIT_TABLE = {
3614 'B': 1,
3615 'b': 1,
70852b47 3616 'bytes': 1,
be64b5b0
PH
3617 'KiB': 1024,
3618 'KB': 1000,
3619 'kB': 1024,
3620 'Kb': 1000,
13585d76 3621 'kb': 1000,
70852b47
YCH
3622 'kilobytes': 1000,
3623 'kibibytes': 1024,
be64b5b0
PH
3624 'MiB': 1024 ** 2,
3625 'MB': 1000 ** 2,
3626 'mB': 1024 ** 2,
3627 'Mb': 1000 ** 2,
13585d76 3628 'mb': 1000 ** 2,
70852b47
YCH
3629 'megabytes': 1000 ** 2,
3630 'mebibytes': 1024 ** 2,
be64b5b0
PH
3631 'GiB': 1024 ** 3,
3632 'GB': 1000 ** 3,
3633 'gB': 1024 ** 3,
3634 'Gb': 1000 ** 3,
13585d76 3635 'gb': 1000 ** 3,
70852b47
YCH
3636 'gigabytes': 1000 ** 3,
3637 'gibibytes': 1024 ** 3,
be64b5b0
PH
3638 'TiB': 1024 ** 4,
3639 'TB': 1000 ** 4,
3640 'tB': 1024 ** 4,
3641 'Tb': 1000 ** 4,
13585d76 3642 'tb': 1000 ** 4,
70852b47
YCH
3643 'terabytes': 1000 ** 4,
3644 'tebibytes': 1024 ** 4,
be64b5b0
PH
3645 'PiB': 1024 ** 5,
3646 'PB': 1000 ** 5,
3647 'pB': 1024 ** 5,
3648 'Pb': 1000 ** 5,
13585d76 3649 'pb': 1000 ** 5,
70852b47
YCH
3650 'petabytes': 1000 ** 5,
3651 'pebibytes': 1024 ** 5,
be64b5b0
PH
3652 'EiB': 1024 ** 6,
3653 'EB': 1000 ** 6,
3654 'eB': 1024 ** 6,
3655 'Eb': 1000 ** 6,
13585d76 3656 'eb': 1000 ** 6,
70852b47
YCH
3657 'exabytes': 1000 ** 6,
3658 'exbibytes': 1024 ** 6,
be64b5b0
PH
3659 'ZiB': 1024 ** 7,
3660 'ZB': 1000 ** 7,
3661 'zB': 1024 ** 7,
3662 'Zb': 1000 ** 7,
13585d76 3663 'zb': 1000 ** 7,
70852b47
YCH
3664 'zettabytes': 1000 ** 7,
3665 'zebibytes': 1024 ** 7,
be64b5b0
PH
3666 'YiB': 1024 ** 8,
3667 'YB': 1000 ** 8,
3668 'yB': 1024 ** 8,
3669 'Yb': 1000 ** 8,
13585d76 3670 'yb': 1000 ** 8,
70852b47
YCH
3671 'yottabytes': 1000 ** 8,
3672 'yobibytes': 1024 ** 8,
be64b5b0
PH
3673 }
3674
fb47597b
S
3675 return lookup_unit_table(_UNIT_TABLE, s)
3676
3677
3678def parse_count(s):
3679 if s is None:
be64b5b0
PH
3680 return None
3681
fb47597b
S
3682 s = s.strip()
3683
3684 if re.match(r'^[\d,.]+$', s):
3685 return str_to_int(s)
3686
3687 _UNIT_TABLE = {
3688 'k': 1000,
3689 'K': 1000,
3690 'm': 1000 ** 2,
3691 'M': 1000 ** 2,
3692 'kk': 1000 ** 2,
3693 'KK': 1000 ** 2,
3694 }
be64b5b0 3695
fb47597b 3696 return lookup_unit_table(_UNIT_TABLE, s)
be64b5b0 3697
2f7ae819 3698
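# Illustrative sketch (editor's addition): filesizes honour binary vs SI prefixes and
# decimal commas, while counts accept thousands separators and k/M suffixes.
def _example_parse_amounts():  # pragma: no cover - illustrative only
    assert parse_filesize('1.5 MiB') == 1572864        # 1.5 * 1024 ** 2
    assert parse_filesize('1,5 GB') == 1500000000      # decimal comma, SI prefix
    assert parse_count('1,000') == 1000
    assert parse_count('1.5k') == 1500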
b871d7e9
S
3699def parse_resolution(s):
3700 if s is None:
3701 return {}
3702
3703 mobj = re.search(r'\b(?P<w>\d+)\s*[xX×]\s*(?P<h>\d+)\b', s)
3704 if mobj:
3705 return {
3706 'width': int(mobj.group('w')),
3707 'height': int(mobj.group('h')),
3708 }
3709
3710 mobj = re.search(r'\b(\d+)[pPiI]\b', s)
3711 if mobj:
3712 return {'height': int(mobj.group(1))}
3713
3714 mobj = re.search(r'\b([48])[kK]\b', s)
3715 if mobj:
3716 return {'height': int(mobj.group(1)) * 540}
3717
3718 return {}
3719
3720
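# Illustrative sketch (editor's addition): the three recognized resolution notations.
def _example_parse_resolution():  # pragma: no cover - illustrative only
    assert parse_resolution('1920x1080') == {'width': 1920, 'height': 1080}
    assert parse_resolution('720p') == {'height': 720}
    assert parse_resolution('4K') == {'height': 2160}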
0dc41787
S
3721def parse_bitrate(s):
3722 if not isinstance(s, compat_str):
3723 return
3724 mobj = re.search(r'\b(\d+)\s*kbps', s)
3725 if mobj:
3726 return int(mobj.group(1))
3727
3728
a942d6cb 3729def month_by_name(name, lang='en'):
caefb1de
PH
3730 """ Return the number of a month by (locale-independently) English name """
3731
f6717dec 3732 month_names = MONTH_NAMES.get(lang, MONTH_NAMES['en'])
a942d6cb 3733
caefb1de 3734 try:
f6717dec 3735 return month_names.index(name) + 1
7105440c
YCH
3736 except ValueError:
3737 return None
3738
3739
3740def month_by_abbreviation(abbrev):
3741 """ Return the number of a month by (locale-independently) English
3742 abbreviations """
3743
3744 try:
3745 return [s[:3] for s in ENGLISH_MONTH_NAMES].index(abbrev) + 1
caefb1de
PH
3746 except ValueError:
3747 return None
18258362
JMF
3748
3749
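# Illustrative sketch (editor's addition), assuming MONTH_NAMES['en'] and
# ENGLISH_MONTH_NAMES (defined elsewhere in this module) hold the English month names:
def _example_month_lookup():  # pragma: no cover - illustrative only
    assert month_by_name('December') == 12
    assert month_by_name('Smarch') is None
    assert month_by_abbreviation('Mar') == 3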
5aafe895 3750def fix_xml_ampersands(xml_str):
18258362 3751 """Replace all the '&' by '&amp;' in XML"""
5aafe895
PH
3752 return re.sub(
3753 r'&(?!amp;|lt;|gt;|apos;|quot;|#x[0-9a-fA-F]{,4};|#[0-9]{,4};)',
28e614de 3754 '&amp;',
5aafe895 3755 xml_str)
e3946f98
PH
3756
3757
3758def setproctitle(title):
8bf48f23 3759 assert isinstance(title, compat_str)
c1c05c67
YCH
3760
3761 # ctypes in Jython is not complete
3762 # http://bugs.jython.org/issue2148
3763 if sys.platform.startswith('java'):
3764 return
3765
e3946f98 3766 try:
611c1dd9 3767 libc = ctypes.cdll.LoadLibrary('libc.so.6')
e3946f98
PH
3768 except OSError:
3769 return
2f49bcd6
RC
3770 except TypeError:
3771 # LoadLibrary in Windows Python 2.7.13 only expects
3772 # a bytestring, but since unicode_literals turns
3773 # every string into a unicode string, it fails.
3774 return
6eefe533
PH
3775 title_bytes = title.encode('utf-8')
3776 buf = ctypes.create_string_buffer(len(title_bytes))
3777 buf.value = title_bytes
e3946f98 3778 try:
6eefe533 3779 libc.prctl(15, buf, 0, 0, 0)
e3946f98
PH
3780 except AttributeError:
3781 return # Strange libc, just skip this
d7dda168
PH
3782
3783
3784def remove_start(s, start):
46bc9b7d 3785 return s[len(start):] if s is not None and s.startswith(start) else s
29eb5174
PH
3786
3787
2b9faf55 3788def remove_end(s, end):
46bc9b7d 3789 return s[:-len(end)] if s is not None and s.endswith(end) else s
2b9faf55
PH
3790
3791
31b2051e
S
3792def remove_quotes(s):
3793 if s is None or len(s) < 2:
3794 return s
3795 for quote in ('"', "'", ):
3796 if s[0] == quote and s[-1] == quote:
3797 return s[1:-1]
3798 return s
3799
3800
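# Illustrative sketch (editor's addition): prefixes/suffixes are only stripped when
# actually present, and quotes only when they are balanced.
def _example_remove_helpers():  # pragma: no cover - illustrative only
    assert remove_start('yt:foo', 'yt:') == 'foo'
    assert remove_end('video.mp4', '.mp4') == 'video'
    assert remove_quotes('"quoted"') == 'quoted'
    assert remove_quotes('"unbalanced') == '"unbalanced'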
b6e0c7d2
U
3801def get_domain(url):
3802 domain = re.match(r'(?:https?:\/\/)?(?:www\.)?(?P<domain>[^\n\/]+\.[^\n\/]+)(?:\/(.*))?', url)
3803 return domain.group('domain') if domain else None
3804
3805
29eb5174 3806def url_basename(url):
9b8aaeed 3807 path = compat_urlparse.urlparse(url).path
28e614de 3808 return path.strip('/').split('/')[-1]
aa94a6d3
PH
3809
3810
02dc0a36
S
3811def base_url(url):
3812 return re.match(r'https?://[^?#&]+/', url).group()
3813
3814
e34c3361 3815def urljoin(base, path):
4b5de77b
S
3816 if isinstance(path, bytes):
3817 path = path.decode('utf-8')
e34c3361
S
3818 if not isinstance(path, compat_str) or not path:
3819 return None
fad4ceb5 3820 if re.match(r'^(?:[a-zA-Z][a-zA-Z0-9+-.]*:)?//', path):
e34c3361 3821 return path
4b5de77b
S
3822 if isinstance(base, bytes):
3823 base = base.decode('utf-8')
3824 if not isinstance(base, compat_str) or not re.match(
3825 r'^(?:https?:)?//', base):
e34c3361
S
3826 return None
3827 return compat_urlparse.urljoin(base, path)
3828
3829
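# Illustrative sketch (editor's addition): basename/base extraction and joining,
# including the protocol-relative short-circuit in urljoin().
def _example_url_helpers():  # pragma: no cover - illustrative only
    assert url_basename('https://example.com/path/to/file.mp4?x=1') == 'file.mp4'
    assert base_url('https://example.com/path/to/file.mp4?x=1') == 'https://example.com/path/to/'
    assert urljoin('https://example.com/a/', 'b/c.mp4') == 'https://example.com/a/b/c.mp4'
    assert urljoin('https://example.com/a/', '//cdn.example.com/c.mp4') == '//cdn.example.com/c.mp4'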
aa94a6d3
PH
3830class HEADRequest(compat_urllib_request.Request):
3831 def get_method(self):
611c1dd9 3832 return 'HEAD'
7217e148
PH
3833
3834
95cf60e8
S
3835class PUTRequest(compat_urllib_request.Request):
3836 def get_method(self):
3837 return 'PUT'
3838
3839
9732d77e 3840def int_or_none(v, scale=1, default=None, get_attr=None, invscale=1):
28746fbd
PH
3841 if get_attr:
3842 if v is not None:
3843 v = getattr(v, get_attr, None)
9572013d
PH
3844 if v == '':
3845 v = None
1812afb7
S
3846 if v is None:
3847 return default
3848 try:
3849 return int(v) * invscale // scale
5e1271c5 3850 except (ValueError, TypeError):
af98f8ff 3851 return default
9732d77e 3852
9572013d 3853
40a90862
JMF
3854def str_or_none(v, default=None):
3855 return default if v is None else compat_str(v)
3856
9732d77e
PH
3857
3858def str_to_int(int_str):
48d4681e 3859 """ A more relaxed version of int_or_none """
42db58ec 3860 if isinstance(int_str, compat_integer_types):
348c6bf1 3861 return int_str
42db58ec
S
3862 elif isinstance(int_str, compat_str):
3863 int_str = re.sub(r'[,\.\+]', '', int_str)
3864 return int_or_none(int_str)
608d11f5
PH
3865
3866
9732d77e 3867def float_or_none(v, scale=1, invscale=1, default=None):
caf80631
S
3868 if v is None:
3869 return default
3870 try:
3871 return float(v) * invscale / scale
5e1271c5 3872 except (ValueError, TypeError):
caf80631 3873 return default
43f775e4
PH
3874
3875
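# Illustrative sketch (editor's addition): lenient numeric coercion as used by the
# extractors; invalid input falls back to the supplied default.
def _example_numeric_coercion():  # pragma: no cover - illustrative only
    assert int_or_none('1080') == 1080
    assert int_or_none('N/A', default=0) == 0
    assert str_to_int('1,000') == 1000
    assert float_or_none('2.5', invscale=1000) == 2500.0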
c7e327c4
S
3876def bool_or_none(v, default=None):
3877 return v if isinstance(v, bool) else default
3878
3879
53cd37ba
S
3880def strip_or_none(v, default=None):
3881 return v.strip() if isinstance(v, compat_str) else default
b72b4431
S
3882
3883
af03000a
S
3884def url_or_none(url):
3885 if not url or not isinstance(url, compat_str):
3886 return None
3887 url = url.strip()
29f7c58a 3888 return url if re.match(r'^(?:(?:https?|rt(?:m(?:pt?[es]?|fp)|sp[su]?)|mms|ftps?):)?//', url) else None
af03000a
S
3889
3890
e29663c6 3891def strftime_or_none(timestamp, date_format, default=None):
3892 datetime_object = None
3893 try:
3894 if isinstance(timestamp, compat_numeric_types): # unix timestamp
3895 datetime_object = datetime.datetime.utcfromtimestamp(timestamp)
3896 elif isinstance(timestamp, compat_str): # assume YYYYMMDD
3897 datetime_object = datetime.datetime.strptime(timestamp, '%Y%m%d')
3898 return datetime_object.strftime(date_format)
3899 except (ValueError, TypeError, AttributeError):
3900 return default
3901
3902
608d11f5 3903def parse_duration(s):
8f9312c3 3904 if not isinstance(s, compat_basestring):
608d11f5
PH
3905 return None
3906
ca7b3246
S
3907 s = s.strip()
3908
acaff495 3909 days, hours, mins, secs, ms = [None] * 5
15846398 3910 m = re.match(r'(?:(?:(?:(?P<days>[0-9]+):)?(?P<hours>[0-9]+):)?(?P<mins>[0-9]+):)?(?P<secs>[0-9]+)(?P<ms>\.[0-9]+)?Z?$', s)
acaff495 3911 if m:
3912 days, hours, mins, secs, ms = m.groups()
3913 else:
3914 m = re.match(
056653bb
S
3915 r'''(?ix)(?:P?
3916 (?:
3917 [0-9]+\s*y(?:ears?)?\s*
3918 )?
3919 (?:
3920 [0-9]+\s*m(?:onths?)?\s*
3921 )?
3922 (?:
3923 [0-9]+\s*w(?:eeks?)?\s*
3924 )?
8f4b58d7 3925 (?:
acaff495 3926 (?P<days>[0-9]+)\s*d(?:ays?)?\s*
8f4b58d7 3927 )?
056653bb 3928 T)?
acaff495 3929 (?:
3930 (?P<hours>[0-9]+)\s*h(?:ours?)?\s*
3931 )?
3932 (?:
3933 (?P<mins>[0-9]+)\s*m(?:in(?:ute)?s?)?\s*
3934 )?
3935 (?:
3936 (?P<secs>[0-9]+)(?P<ms>\.[0-9]+)?\s*s(?:ec(?:ond)?s?)?\s*
15846398 3937 )?Z?$''', s)
acaff495 3938 if m:
3939 days, hours, mins, secs, ms = m.groups()
3940 else:
15846398 3941 m = re.match(r'(?i)(?:(?P<hours>[0-9.]+)\s*(?:hours?)|(?P<mins>[0-9.]+)\s*(?:mins?\.?|minutes?)\s*)Z?$', s)
acaff495 3942 if m:
3943 hours, mins = m.groups()
3944 else:
3945 return None
3946
3947 duration = 0
3948 if secs:
3949 duration += float(secs)
3950 if mins:
3951 duration += float(mins) * 60
3952 if hours:
3953 duration += float(hours) * 60 * 60
3954 if days:
3955 duration += float(days) * 24 * 60 * 60
3956 if ms:
3957 duration += float(ms)
3958 return duration
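# Example usage (illustrative): clock-style, ISO-8601-style and free-text
# durations are all normalized to seconds.
#   parse_duration('1:23:45') == 5025.0
#   parse_duration('PT1H30M') == 5400.0
#   parse_duration('3 min') == 180.0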
91d7d0b3
JMF
3959
3960
e65e4c88 3961def prepend_extension(filename, ext, expected_real_ext=None):
5f6a1245 3962 name, real_ext = os.path.splitext(filename)
e65e4c88
S
3963 return (
3964 '{0}.{1}{2}'.format(name, ext, real_ext)
3965 if not expected_real_ext or real_ext[1:] == expected_real_ext
3966 else '{0}.{1}'.format(filename, ext))
d70ad093
PH
3967
3968
b3ed15b7
S
3969def replace_extension(filename, ext, expected_real_ext=None):
3970 name, real_ext = os.path.splitext(filename)
3971 return '{0}.{1}'.format(
3972 name if not expected_real_ext or real_ext[1:] == expected_real_ext else filename,
3973 ext)
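# Example usage (illustrative):
#   prepend_extension('video.mp4', 'temp') == 'video.temp.mp4'
#   prepend_extension('video.mp4', 'temp', expected_real_ext='webm') == 'video.mp4.temp'
#   replace_extension('video.mp4', 'mkv') == 'video.mkv'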
3974
3975
d70ad093
PH
3976def check_executable(exe, args=[]):
3977 """ Checks if the given binary is installed somewhere in PATH, and returns its name.
3978 args can be a list of arguments for a short output (like -version) """
3979 try:
f5b1bca9 3980 process_communicate_or_kill(subprocess.Popen(
3981 [exe] + args, stdout=subprocess.PIPE, stderr=subprocess.PIPE))
d70ad093
PH
3982 except OSError:
3983 return False
3984 return exe
b7ab0590
PH
3985
3986
95807118 3987def get_exe_version(exe, args=['--version'],
cae97f65 3988 version_re=None, unrecognized='present'):
95807118
PH
3989 """ Returns the version of the specified executable,
3990 or False if the executable is not present """
3991 try:
b64d04c1 3992 # STDIN should be redirected too. On UNIX-like systems, ffmpeg triggers
7a5c1cfe 3993 # SIGTTOU if yt-dlp is run in the background.
067aa17e 3994 # See https://github.com/ytdl-org/youtube-dl/issues/955#issuecomment-209789656
f5b1bca9 3995 out, _ = process_communicate_or_kill(subprocess.Popen(
54116803 3996 [encodeArgument(exe)] + args,
00ca7552 3997 stdin=subprocess.PIPE,
f5b1bca9 3998 stdout=subprocess.PIPE, stderr=subprocess.STDOUT))
95807118
PH
3999 except OSError:
4000 return False
cae97f65
PH
4001 if isinstance(out, bytes): # Python 2.x
4002 out = out.decode('ascii', 'ignore')
4003 return detect_exe_version(out, version_re, unrecognized)
4004
4005
4006def detect_exe_version(output, version_re=None, unrecognized='present'):
4007 assert isinstance(output, compat_str)
4008 if version_re is None:
4009 version_re = r'version\s+([-0-9._a-zA-Z]+)'
4010 m = re.search(version_re, output)
95807118
PH
4011 if m:
4012 return m.group(1)
4013 else:
4014 return unrecognized
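# Example usage (illustrative; the version banner below is made up):
#   detect_exe_version('ffmpeg version 4.4.1 Copyright (c) 2000-2021') == '4.4.1'
#   detect_exe_version('garbled output', unrecognized='present') == 'present'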
4015
4016
cb89cfc1 4017class LazyList(collections.abc.Sequence):
483336e7 4018 ''' Lazy immutable list from an iterable
4019 Note that slices of a LazyList are lists and not LazyList'''
4020
8e5fecc8 4021 class IndexError(IndexError):
4022 pass
4023
483336e7 4024 def __init__(self, iterable):
4025 self.__iterable = iter(iterable)
4026 self.__cache = []
28419ca2 4027 self.__reversed = False
483336e7 4028
4029 def __iter__(self):
28419ca2 4030 if self.__reversed:
4031 # We need to consume the entire iterable to iterate in reverse
981052c9 4032 yield from self.exhaust()
28419ca2 4033 return
4034 yield from self.__cache
483336e7 4035 for item in self.__iterable:
4036 self.__cache.append(item)
4037 yield item
4038
981052c9 4039 def __exhaust(self):
483336e7 4040 self.__cache.extend(self.__iterable)
28419ca2 4041 return self.__cache
4042
981052c9 4043 def exhaust(self):
4044 ''' Evaluate the entire iterable '''
4045 return self.__exhaust()[::-1 if self.__reversed else 1]
4046
28419ca2 4047 @staticmethod
981052c9 4048 def __reverse_index(x):
e0f2b4b4 4049 return None if x is None else -(x + 1)
483336e7 4050
4051 def __getitem__(self, idx):
4052 if isinstance(idx, slice):
28419ca2 4053 if self.__reversed:
e0f2b4b4 4054 idx = slice(self.__reverse_index(idx.start), self.__reverse_index(idx.stop), -(idx.step or 1))
4055 start, stop, step = idx.start, idx.stop, idx.step or 1
483336e7 4056 elif isinstance(idx, int):
28419ca2 4057 if self.__reversed:
981052c9 4058 idx = self.__reverse_index(idx)
e0f2b4b4 4059 start, stop, step = idx, idx, 0
483336e7 4060 else:
4061 raise TypeError('indices must be integers or slices')
e0f2b4b4 4062 if ((start or 0) < 0 or (stop or 0) < 0
4063 or (start is None and step < 0)
4064 or (stop is None and step > 0)):
483336e7 4065 # We need to consume the entire iterable to be able to slice from the end
4066 # Obviously, never use this with infinite iterables
8e5fecc8 4067 self.__exhaust()
4068 try:
4069 return self.__cache[idx]
4070 except IndexError as e:
4071 raise self.IndexError(e) from e
e0f2b4b4 4072 n = max(start or 0, stop or 0) - len(self.__cache) + 1
28419ca2 4073 if n > 0:
4074 self.__cache.extend(itertools.islice(self.__iterable, n))
8e5fecc8 4075 try:
4076 return self.__cache[idx]
4077 except IndexError as e:
4078 raise self.IndexError(e) from e
483336e7 4079
4080 def __bool__(self):
4081 try:
28419ca2 4082 self[-1] if self.__reversed else self[0]
8e5fecc8 4083 except self.IndexError:
483336e7 4084 return False
4085 return True
4086
4087 def __len__(self):
8e5fecc8 4088 self.__exhaust()
483336e7 4089 return len(self.__cache)
4090
981052c9 4091 def reverse(self):
28419ca2 4092 self.__reversed = not self.__reversed
4093 return self
4094
4095 def __repr__(self):
4096 # repr and str should mimic a list. So we exhaust the iterable
4097 return repr(self.exhaust())
4098
4099 def __str__(self):
4100 return repr(self.exhaust())
4101
483336e7 4102
7be9ccff 4103class PagedList:
dd26ced1
PH
4104 def __len__(self):
4105 # This is only useful for tests
4106 return len(self.getslice())
4107
7be9ccff 4108 def __init__(self, pagefunc, pagesize, use_cache=True):
4109 self._pagefunc = pagefunc
4110 self._pagesize = pagesize
4111 self._use_cache = use_cache
4112 self._cache = {}
4113
4114 def getpage(self, pagenum):
4115 page_results = self._cache.get(pagenum) or list(self._pagefunc(pagenum))
4116 if self._use_cache:
4117 self._cache[pagenum] = page_results
4118 return page_results
4119
4120 def getslice(self, start=0, end=None):
4121 return list(self._getslice(start, end))
4122
4123 def _getslice(self, start, end):
55575225 4124 raise NotImplementedError('This method must be implemented by subclasses')
4125
4126 def __getitem__(self, idx):
7be9ccff 4127 # NOTE: cache must be enabled if this is used
55575225 4128 if not isinstance(idx, int) or idx < 0:
4129 raise TypeError('indices must be non-negative integers')
4130 entries = self.getslice(idx, idx + 1)
4131 return entries[0] if entries else None
4132
9c44d242
PH
4133
4134class OnDemandPagedList(PagedList):
7be9ccff 4135 def _getslice(self, start, end):
b7ab0590
PH
4136 for pagenum in itertools.count(start // self._pagesize):
4137 firstid = pagenum * self._pagesize
4138 nextfirstid = pagenum * self._pagesize + self._pagesize
4139 if start >= nextfirstid:
4140 continue
4141
b7ab0590
PH
4142 startv = (
4143 start % self._pagesize
4144 if firstid <= start < nextfirstid
4145 else 0)
b7ab0590
PH
4146 endv = (
4147 ((end - 1) % self._pagesize) + 1
4148 if (end is not None and firstid <= end <= nextfirstid)
4149 else None)
4150
7be9ccff 4151 page_results = self.getpage(pagenum)
b7ab0590
PH
4152 if startv != 0 or endv is not None:
4153 page_results = page_results[startv:endv]
7be9ccff 4154 yield from page_results
b7ab0590
PH
4155
4156 # A little optimization - if the current page is not "full", i.e. does
4157 # not contain page_size videos, then we can assume that this page
4158 # is the last one - there are no more ids on further pages -
4159 # i.e. no need to query again.
4160 if len(page_results) + startv < self._pagesize:
4161 break
4162
4163 # If we got the whole page, but the next page is not interesting,
4164 # break out early as well
4165 if end == nextfirstid:
4166 break
81c2f20b
PH
4167
4168
9c44d242
PH
4169class InAdvancePagedList(PagedList):
4170 def __init__(self, pagefunc, pagecount, pagesize):
9c44d242 4171 self._pagecount = pagecount
7be9ccff 4172 PagedList.__init__(self, pagefunc, pagesize, True)
9c44d242 4173
7be9ccff 4174 def _getslice(self, start, end):
9c44d242
PH
4175 start_page = start // self._pagesize
4176 end_page = (
4177 self._pagecount if end is None else (end // self._pagesize + 1))
4178 skip_elems = start - start_page * self._pagesize
4179 only_more = None if end is None else end - start
4180 for pagenum in range(start_page, end_page):
7be9ccff 4181 page_results = self.getpage(pagenum)
9c44d242 4182 if skip_elems:
7be9ccff 4183 page_results = page_results[skip_elems:]
9c44d242
PH
4184 skip_elems = None
4185 if only_more is not None:
7be9ccff 4186 if len(page_results) < only_more:
4187 only_more -= len(page_results)
9c44d242 4188 else:
7be9ccff 4189 yield from page_results[:only_more]
9c44d242 4190 break
7be9ccff 4191 yield from page_results
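# Example usage (illustrative): pages of 10 items are fetched on demand and
# cached, so a slice only triggers the page callbacks it actually needs.
#   pages = OnDemandPagedList(lambda n: list(range(n * 10, (n + 1) * 10)), 10)
#   pages.getslice(5, 15) == [5, 6, 7, 8, 9, 10, 11, 12, 13, 14]
#   pages[42] == 42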
9c44d242
PH
4192
4193
81c2f20b 4194def uppercase_escape(s):
676eb3f2 4195 unicode_escape = codecs.getdecoder('unicode_escape')
81c2f20b 4196 return re.sub(
a612753d 4197 r'\\U[0-9a-fA-F]{8}',
676eb3f2
PH
4198 lambda m: unicode_escape(m.group(0))[0],
4199 s)
0fe2ff78
YCH
4200
4201
4202def lowercase_escape(s):
4203 unicode_escape = codecs.getdecoder('unicode_escape')
4204 return re.sub(
4205 r'\\u[0-9a-fA-F]{4}',
4206 lambda m: unicode_escape(m.group(0))[0],
4207 s)
b53466e1 4208
d05cfe06
S
4209
4210def escape_rfc3986(s):
4211 """Escape non-ASCII characters as suggested by RFC 3986"""
8f9312c3 4212 if sys.version_info < (3, 0) and isinstance(s, compat_str):
d05cfe06 4213 s = s.encode('utf-8')
ecc0c5ee 4214 return compat_urllib_parse.quote(s, b"%/;:@&=+$,!~*'()?#[]")
d05cfe06
S
4215
4216
4217def escape_url(url):
4218 """Escape URL as suggested by RFC 3986"""
4219 url_parsed = compat_urllib_parse_urlparse(url)
4220 return url_parsed._replace(
efbed08d 4221 netloc=url_parsed.netloc.encode('idna').decode('ascii'),
d05cfe06
S
4222 path=escape_rfc3986(url_parsed.path),
4223 params=escape_rfc3986(url_parsed.params),
4224 query=escape_rfc3986(url_parsed.query),
4225 fragment=escape_rfc3986(url_parsed.fragment)
4226 ).geturl()
4227
62e609ab 4228
4dfbf869 4229def parse_qs(url):
4230 return compat_parse_qs(compat_urllib_parse_urlparse(url).query)
4231
4232
62e609ab
PH
4233def read_batch_urls(batch_fd):
4234 def fixup(url):
4235 if not isinstance(url, compat_str):
4236 url = url.decode('utf-8', 'replace')
8c04f0be 4237 BOM_UTF8 = ('\xef\xbb\xbf', '\ufeff')
4238 for bom in BOM_UTF8:
4239 if url.startswith(bom):
4240 url = url[len(bom):]
4241 url = url.lstrip()
4242 if not url or url.startswith(('#', ';', ']')):
62e609ab 4243 return False
8c04f0be 4244 # "#" cannot be stripped out since it is part of the URI
4245 # However, it can be safely stripped out if following a whitespace
4246 return re.split(r'\s#', url, 1)[0].rstrip()
62e609ab
PH
4247
4248 with contextlib.closing(batch_fd) as fd:
4249 return [url for url in map(fixup, fd) if url]
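# Example usage (illustrative): BOMs are stripped, comment lines are dropped
# and trailing " #..." notes are cut off.
#   read_batch_urls(io.StringIO('# list\nhttps://example.com/v  # note\n'))
#       == ['https://example.com/v']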
b74fa8cd
JMF
4250
4251
4252def urlencode_postdata(*args, **kargs):
15707c7e 4253 return compat_urllib_parse_urlencode(*args, **kargs).encode('ascii')
bcf89ce6
PH
4254
4255
38f9ef31 4256def update_url_query(url, query):
cacd9966
YCH
4257 if not query:
4258 return url
38f9ef31 4259 parsed_url = compat_urlparse.urlparse(url)
4260 qs = compat_parse_qs(parsed_url.query)
4261 qs.update(query)
4262 return compat_urlparse.urlunparse(parsed_url._replace(
15707c7e 4263 query=compat_urllib_parse_urlencode(qs, True)))
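# Example usage (illustrative):
#   update_url_query('https://example.com/path?a=1', {'b': '2'})
#       == 'https://example.com/path?a=1&b=2'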
16392824 4264
8e60dc75 4265
ed0291d1
S
4266def update_Request(req, url=None, data=None, headers={}, query={}):
4267 req_headers = req.headers.copy()
4268 req_headers.update(headers)
4269 req_data = data or req.data
4270 req_url = update_url_query(url or req.get_full_url(), query)
95cf60e8
S
4271 req_get_method = req.get_method()
4272 if req_get_method == 'HEAD':
4273 req_type = HEADRequest
4274 elif req_get_method == 'PUT':
4275 req_type = PUTRequest
4276 else:
4277 req_type = compat_urllib_request.Request
ed0291d1
S
4278 new_req = req_type(
4279 req_url, data=req_data, headers=req_headers,
4280 origin_req_host=req.origin_req_host, unverifiable=req.unverifiable)
4281 if hasattr(req, 'timeout'):
4282 new_req.timeout = req.timeout
4283 return new_req
4284
4285
10c87c15 4286def _multipart_encode_impl(data, boundary):
0c265486
YCH
4287 content_type = 'multipart/form-data; boundary=%s' % boundary
4288
4289 out = b''
4290 for k, v in data.items():
4291 out += b'--' + boundary.encode('ascii') + b'\r\n'
4292 if isinstance(k, compat_str):
4293 k = k.encode('utf-8')
4294 if isinstance(v, compat_str):
4295 v = v.encode('utf-8')
4296 # RFC 2047 requires non-ASCII field names to be encoded, while RFC 7578
4297 # suggests sending UTF-8 directly. Firefox sends UTF-8, too
b2ad479d 4298 content = b'Content-Disposition: form-data; name="' + k + b'"\r\n\r\n' + v + b'\r\n'
0c265486
YCH
4299 if boundary.encode('ascii') in content:
4300 raise ValueError('Boundary overlaps with data')
4301 out += content
4302
4303 out += b'--' + boundary.encode('ascii') + b'--\r\n'
4304
4305 return out, content_type
4306
4307
4308def multipart_encode(data, boundary=None):
4309 '''
4310 Encode a dict to RFC 7578-compliant form-data
4311
4312 data:
4313 A dict where keys and values can be either Unicode or bytes-like
4314 objects.
4315 boundary:
4316 If specified, it must be a Unicode object and is used as the boundary. Otherwise
4317 a random boundary is generated.
4318
4319 Reference: https://tools.ietf.org/html/rfc7578
4320 '''
4321 has_specified_boundary = boundary is not None
4322
4323 while True:
4324 if boundary is None:
4325 boundary = '---------------' + str(random.randrange(0x0fffffff, 0xffffffff))
4326
4327 try:
10c87c15 4328 out, content_type = _multipart_encode_impl(data, boundary)
0c265486
YCH
4329 break
4330 except ValueError:
4331 if has_specified_boundary:
4332 raise
4333 boundary = None
4334
4335 return out, content_type
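# Example usage (illustrative): returns the encoded body and its Content-Type.
#   body, ctype = multipart_encode({'field': 'value'}, boundary='xxx')
#   ctype == 'multipart/form-data; boundary=xxx'
#   body.startswith(b'--xxx\r\nContent-Disposition: form-data; name="field"\r\n\r\nvalue\r\n')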
4336
4337
86296ad2 4338def dict_get(d, key_or_keys, default=None, skip_false_values=True):
cbecc9b9
S
4339 if isinstance(key_or_keys, (list, tuple)):
4340 for key in key_or_keys:
86296ad2
S
4341 if key not in d or d[key] is None or skip_false_values and not d[key]:
4342 continue
4343 return d[key]
cbecc9b9
S
4344 return default
4345 return d.get(key_or_keys, default)
4346
4347
329ca3be 4348def try_get(src, getter, expected_type=None):
6606817a 4349 for get in variadic(getter):
a32a9a7e
S
4350 try:
4351 v = get(src)
4352 except (AttributeError, KeyError, TypeError, IndexError):
4353 pass
4354 else:
4355 if expected_type is None or isinstance(v, expected_type):
4356 return v
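# Example usage (illustrative):
#   dict_get({'a': None, 'b': 0, 'c': 5}, ('a', 'b', 'c')) == 5   # falsy values skipped by default
#   try_get({'x': [{'y': 1}]}, lambda d: d['x'][0]['y'], int) == 1
#   try_get({}, lambda d: d['missing'][0]) is None                # lookup errors are swallowed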
329ca3be
S
4357
4358
6cc62232
S
4359def merge_dicts(*dicts):
4360 merged = {}
4361 for a_dict in dicts:
4362 for k, v in a_dict.items():
4363 if v is None:
4364 continue
3089bc74
S
4365 if (k not in merged
4366 or (isinstance(v, compat_str) and v
4367 and isinstance(merged[k], compat_str)
4368 and not merged[k])):
6cc62232
S
4369 merged[k] = v
4370 return merged
4371
4372
8e60dc75
S
4373def encode_compat_str(string, encoding=preferredencoding(), errors='strict'):
4374 return string if isinstance(string, compat_str) else compat_str(string, encoding, errors)
4375
16392824 4376
a1a530b0
PH
4377US_RATINGS = {
4378 'G': 0,
4379 'PG': 10,
4380 'PG-13': 13,
4381 'R': 16,
4382 'NC': 18,
4383}
fac55558
PH
4384
4385
a8795327 4386TV_PARENTAL_GUIDELINES = {
5a16c9d9
RA
4387 'TV-Y': 0,
4388 'TV-Y7': 7,
4389 'TV-G': 0,
4390 'TV-PG': 0,
4391 'TV-14': 14,
4392 'TV-MA': 17,
a8795327
S
4393}
4394
4395
146c80e2 4396def parse_age_limit(s):
a8795327
S
4397 if type(s) == int:
4398 return s if 0 <= s <= 21 else None
4399 if not isinstance(s, compat_basestring):
d838b1bd 4400 return None
146c80e2 4401 m = re.match(r'^(?P<age>\d{1,2})\+?$', s)
a8795327
S
4402 if m:
4403 return int(m.group('age'))
5c5fae6d 4404 s = s.upper()
a8795327
S
4405 if s in US_RATINGS:
4406 return US_RATINGS[s]
5a16c9d9 4407 m = re.match(r'^TV[_-]?(%s)$' % '|'.join(k[3:] for k in TV_PARENTAL_GUIDELINES), s)
b8361187 4408 if m:
5a16c9d9 4409 return TV_PARENTAL_GUIDELINES['TV-' + m.group(1)]
b8361187 4410 return None
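# Example usage (illustrative):
#   parse_age_limit(18) == 18
#   parse_age_limit('18+') == 18
#   parse_age_limit('PG-13') == 13
#   parse_age_limit('TV-MA') == 17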
146c80e2
S
4411
4412
fac55558 4413def strip_jsonp(code):
609a61e3 4414 return re.sub(
5552c9eb 4415 r'''(?sx)^
e9c671d5 4416 (?:window\.)?(?P<func_name>[a-zA-Z0-9_.$]*)
5552c9eb
YCH
4417 (?:\s*&&\s*(?P=func_name))?
4418 \s*\(\s*(?P<callback_data>.*)\);?
4419 \s*?(?://[^\n]*)*$''',
4420 r'\g<callback_data>', code)
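# Example usage (illustrative):
#   strip_jsonp('callback({"status": "ok"});') == '{"status": "ok"}'
#   strip_jsonp('window.cb({"a": 1})') == '{"a": 1}'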
478c2c61
PH
4421
4422
5c610515 4423def js_to_json(code, vars={}):
4424 # vars is a dict of var, val pairs to substitute
c843e685 4425 COMMENT_RE = r'/\*(?:(?!\*/).)*?\*/|//[^\n]*\n'
4195096e
S
4426 SKIP_RE = r'\s*(?:{comment})?\s*'.format(comment=COMMENT_RE)
4427 INTEGER_TABLE = (
4428 (r'(?s)^(0[xX][0-9a-fA-F]+){skip}:?$'.format(skip=SKIP_RE), 16),
4429 (r'(?s)^(0+[0-7]+){skip}:?$'.format(skip=SKIP_RE), 8),
4430 )
4431
e05f6939 4432 def fix_kv(m):
e7b6d122
PH
4433 v = m.group(0)
4434 if v in ('true', 'false', 'null'):
4435 return v
421ddcb8
C
4436 elif v in ('undefined', 'void 0'):
4437 return 'null'
8bdd16b4 4438 elif v.startswith('/*') or v.startswith('//') or v.startswith('!') or v == ',':
bd1e4844 4439 return ""
4440
4441 if v[0] in ("'", '"'):
4442 v = re.sub(r'(?s)\\.|"', lambda m: {
e7b6d122 4443 '"': '\\"',
bd1e4844 4444 "\\'": "'",
4445 '\\\n': '',
4446 '\\x': '\\u00',
4447 }.get(m.group(0), m.group(0)), v[1:-1])
8bdd16b4 4448 else:
4449 for regex, base in INTEGER_TABLE:
4450 im = re.match(regex, v)
4451 if im:
4452 i = int(im.group(1), base)
4453 return '"%d":' % i if v.endswith(':') else '%d' % i
89ac4a19 4454
5c610515 4455 if v in vars:
4456 return vars[v]
4457
e7b6d122 4458 return '"%s"' % v
e05f6939 4459
bd1e4844 4460 return re.sub(r'''(?sx)
4461 "(?:[^"\\]*(?:\\\\|\\['"nurtbfx/\n]))*[^"\\]*"|
4462 '(?:[^'\\]*(?:\\\\|\\['"nurtbfx/\n]))*[^'\\]*'|
4195096e 4463 {comment}|,(?={skip}[\]}}])|
421ddcb8 4464 void\s0|(?:(?<![0-9])[eE]|[a-df-zA-DF-Z_$])[.a-zA-Z_$0-9]*|
4195096e 4465 \b(?:0[xX][0-9a-fA-F]+|0+[0-7]+)(?:{skip}:)?|
8bdd16b4 4466 [0-9]+(?={skip}:)|
4467 !+
4195096e 4468 '''.format(comment=COMMENT_RE, skip=SKIP_RE), fix_kv, code)
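# Example usage (illustrative): unquoted keys, single quotes, hex literals and
# trailing commas are converted to valid JSON; `vars` substitutes identifiers.
#   js_to_json("{abc: 'def', num: 0x1A, trailing: 'comma',}")
#       == '{"abc": "def", "num": 26, "trailing": "comma"}'
#   js_to_json('{a: x}', vars={'x': '1'}) == '{"a": 1}'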
e05f6939
PH
4469
4470
478c2c61
PH
4471def qualities(quality_ids):
4472 """ Get a numeric quality value out of a list of possible values """
4473 def q(qid):
4474 try:
4475 return quality_ids.index(qid)
4476 except ValueError:
4477 return -1
4478 return q
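# Example usage (illustrative):
#   q = qualities(['144p', '480p', '1080p'])
#   q('480p') == 1 and q('1080p') == 2 and q('4k') == -1   # unknown IDs sort lowest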
4479
acd69589 4480
de6000d9 4481DEFAULT_OUTTMPL = {
4482 'default': '%(title)s [%(id)s].%(ext)s',
72755351 4483 'chapter': '%(title)s - %(section_number)03d %(section_title)s [%(id)s].%(ext)s',
de6000d9 4484}
4485OUTTMPL_TYPES = {
72755351 4486 'chapter': None,
de6000d9 4487 'subtitle': None,
4488 'thumbnail': None,
4489 'description': 'description',
4490 'annotation': 'annotations.xml',
4491 'infojson': 'info.json',
5112f26a 4492 'pl_thumbnail': None,
de6000d9 4493 'pl_description': 'description',
4494 'pl_infojson': 'info.json',
4495}
0a871f68 4496
143db31d 4497# As of [1], the format syntax is:
4498# %[mapping_key][conversion_flags][minimum_width][.precision][length_modifier]type
4499# 1. https://docs.python.org/2/library/stdtypes.html#string-formatting
901130bb 4500STR_FORMAT_RE_TMPL = r'''(?x)
4501 (?<!%)(?P<prefix>(?:%%)*)
143db31d 4502 %
524e2e4f 4503 (?P<has_key>\((?P<key>{0})\))?
752cda38 4504 (?P<format>
524e2e4f 4505 (?P<conversion>[#0\-+ ]+)?
4506 (?P<min_width>\d+)?
4507 (?P<precision>\.\d+)?
4508 (?P<len_mod>[hlL])? # unused in python
901130bb 4509 {1} # conversion type
752cda38 4510 )
143db31d 4511'''
4512
7d1eb38a 4513
901130bb 4514STR_FORMAT_TYPES = 'diouxXeEfFgGcrs'
a020a0dc 4515
7d1eb38a 4516
a020a0dc
PH
4517def limit_length(s, length):
4518 """ Add ellipses to overly long strings """
4519 if s is None:
4520 return None
4521 ELLIPSES = '...'
4522 if len(s) > length:
4523 return s[:length - len(ELLIPSES)] + ELLIPSES
4524 return s
48844745
PH
4525
4526
4527def version_tuple(v):
5f9b8394 4528 return tuple(int(e) for e in re.split(r'[-.]', v))
48844745
PH
4529
4530
4531def is_outdated_version(version, limit, assume_new=True):
4532 if not version:
4533 return not assume_new
4534 try:
4535 return version_tuple(version) < version_tuple(limit)
4536 except ValueError:
4537 return not assume_new
732ea2f0
PH
4538
4539
4540def ytdl_is_updateable():
7a5c1cfe 4541 """ Returns if yt-dlp can be updated with -U """
735d865e 4542
5d535b4a 4543 from .update import is_non_updateable
732ea2f0 4544
5d535b4a 4545 return not is_non_updateable()
7d4111ed
PH
4546
4547
4548def args_to_str(args):
4549 # Get a short string representation for a subprocess command
702ccf2d 4550 return ' '.join(compat_shlex_quote(a) for a in args)
2ccd1b10
PH
4551
4552
9b9c5355 4553def error_to_compat_str(err):
fdae2358
S
4554 err_str = str(err)
4555 # On Python 2, the error byte string must be decoded with the proper
4556 # encoding rather than ascii
4557 if sys.version_info[0] < 3:
4558 err_str = err_str.decode(preferredencoding())
4559 return err_str
4560
4561
c460bdd5 4562def mimetype2ext(mt):
eb9ee194
S
4563 if mt is None:
4564 return None
4565
9359f3d4
F
4566 mt, _, params = mt.partition(';')
4567 mt = mt.strip()
4568
4569 FULL_MAP = {
765ac263 4570 'audio/mp4': 'm4a',
6c33d24b
YCH
4571 # Per RFC 3003, audio/mpeg can be .mp1, .mp2 or .mp3. Here use .mp3 as
4572 # it's the most popular one
4573 'audio/mpeg': 'mp3',
ba39289d 4574 'audio/x-wav': 'wav',
9359f3d4
F
4575 'audio/wav': 'wav',
4576 'audio/wave': 'wav',
4577 }
4578
4579 ext = FULL_MAP.get(mt)
765ac263
JMF
4580 if ext is not None:
4581 return ext
4582
9359f3d4 4583 SUBTYPE_MAP = {
f6861ec9 4584 '3gpp': '3gp',
cafcf657 4585 'smptett+xml': 'tt',
cafcf657 4586 'ttaf+xml': 'dfxp',
a0d8d704 4587 'ttml+xml': 'ttml',
f6861ec9 4588 'x-flv': 'flv',
a0d8d704 4589 'x-mp4-fragmented': 'mp4',
d4f05d47 4590 'x-ms-sami': 'sami',
a0d8d704 4591 'x-ms-wmv': 'wmv',
b4173f15
RA
4592 'mpegurl': 'm3u8',
4593 'x-mpegurl': 'm3u8',
4594 'vnd.apple.mpegurl': 'm3u8',
4595 'dash+xml': 'mpd',
b4173f15 4596 'f4m+xml': 'f4m',
f164b971 4597 'hds+xml': 'f4m',
e910fe2f 4598 'vnd.ms-sstr+xml': 'ism',
c2b2c7e1 4599 'quicktime': 'mov',
98ce1a3f 4600 'mp2t': 'ts',
39e7107d 4601 'x-wav': 'wav',
9359f3d4
F
4602 'filmstrip+json': 'fs',
4603 'svg+xml': 'svg',
4604 }
4605
4606 _, _, subtype = mt.rpartition('/')
4607 ext = SUBTYPE_MAP.get(subtype.lower())
4608 if ext is not None:
4609 return ext
4610
4611 SUFFIX_MAP = {
4612 'json': 'json',
4613 'xml': 'xml',
4614 'zip': 'zip',
4615 'gzip': 'gz',
4616 }
4617
4618 _, _, suffix = subtype.partition('+')
4619 ext = SUFFIX_MAP.get(suffix)
4620 if ext is not None:
4621 return ext
4622
4623 return subtype.replace('+', '.')
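# Example usage (illustrative):
#   mimetype2ext('audio/mp4') == 'm4a'
#   mimetype2ext('application/x-mpegURL; charset=UTF-8') == 'm3u8'
#   mimetype2ext('application/vnd.ms-sstr+xml') == 'ism'
#   mimetype2ext('image/svg+xml') == 'svg'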
c460bdd5
PH
4624
4625
4f3c5e06 4626def parse_codecs(codecs_str):
4627 # http://tools.ietf.org/html/rfc6381
4628 if not codecs_str:
4629 return {}
a0566bbf 4630 split_codecs = list(filter(None, map(
dbf5416a 4631 str.strip, codecs_str.strip().strip(',').split(','))))
176f1866 4632 vcodec, acodec, hdr = None, None, None
a0566bbf 4633 for full_codec in split_codecs:
4f3c5e06 4634 codec = full_codec.split('.')[0]
6993f78d 4635 if codec in ('avc1', 'avc2', 'avc3', 'avc4', 'vp9', 'vp8', 'hev1', 'hev2', 'h263', 'h264', 'mp4v', 'hvc1', 'av01', 'theora', 'dvh1', 'dvhe'):
4f3c5e06 4636 if not vcodec:
4637 vcodec = full_codec
176f1866 4638 if codec in ('dvh1', 'dvhe'):
4639 hdr = 'DV'
4640 elif codec == 'vp9' and vcodec.startswith('vp9.2'):
4641 hdr = 'HDR10'
4642 elif codec == 'av01':
4643 parts = full_codec.split('.')
4644 if len(parts) > 3 and parts[3] == '10':
4645 hdr = 'HDR10'
4646 vcodec = '.'.join(parts[:4])
60f5c9fb 4647 elif codec in ('mp4a', 'opus', 'vorbis', 'mp3', 'aac', 'ac-3', 'ec-3', 'eac3', 'dtsc', 'dtse', 'dtsh', 'dtsl'):
4f3c5e06 4648 if not acodec:
4649 acodec = full_codec
4650 else:
60f5c9fb 4651 write_string('WARNING: Unknown codec %s\n' % full_codec, sys.stderr)
4f3c5e06 4652 if not vcodec and not acodec:
a0566bbf 4653 if len(split_codecs) == 2:
4f3c5e06 4654 return {
a0566bbf 4655 'vcodec': split_codecs[0],
4656 'acodec': split_codecs[1],
4f3c5e06 4657 }
4658 else:
4659 return {
4660 'vcodec': vcodec or 'none',
4661 'acodec': acodec or 'none',
176f1866 4662 'dynamic_range': hdr,
4f3c5e06 4663 }
4664 return {}
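# Example usage (illustrative):
#   parse_codecs('avc1.64001f, mp4a.40.2')
#       == {'vcodec': 'avc1.64001f', 'acodec': 'mp4a.40.2', 'dynamic_range': None}
#   parse_codecs('vp9.2') == {'vcodec': 'vp9.2', 'acodec': 'none', 'dynamic_range': 'HDR10'}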
4665
4666
2ccd1b10 4667def urlhandle_detect_ext(url_handle):
79298173 4668 getheader = url_handle.headers.get
2ccd1b10 4669
b55ee18f
PH
4670 cd = getheader('Content-Disposition')
4671 if cd:
4672 m = re.match(r'attachment;\s*filename="(?P<filename>[^"]+)"', cd)
4673 if m:
4674 e = determine_ext(m.group('filename'), default_ext=None)
4675 if e:
4676 return e
4677
c460bdd5 4678 return mimetype2ext(getheader('Content-Type'))
05900629
PH
4679
4680
1e399778
YCH
4681def encode_data_uri(data, mime_type):
4682 return 'data:%s;base64,%s' % (mime_type, base64.b64encode(data).decode('ascii'))
4683
4684
05900629 4685def age_restricted(content_limit, age_limit):
6ec6cb4e 4686 """ Returns True iff the content should be blocked """
05900629
PH
4687
4688 if age_limit is None: # No limit set
4689 return False
4690 if content_limit is None:
4691 return False # Content available for everyone
4692 return age_limit < content_limit
61ca9a80
PH
4693
4694
4695def is_html(first_bytes):
4696 """ Detect whether a file contains HTML by examining its first bytes. """
4697
4698 BOMS = [
4699 (b'\xef\xbb\xbf', 'utf-8'),
4700 (b'\x00\x00\xfe\xff', 'utf-32-be'),
4701 (b'\xff\xfe\x00\x00', 'utf-32-le'),
4702 (b'\xff\xfe', 'utf-16-le'),
4703 (b'\xfe\xff', 'utf-16-be'),
4704 ]
4705 for bom, enc in BOMS:
4706 if first_bytes.startswith(bom):
4707 s = first_bytes[len(bom):].decode(enc, 'replace')
4708 break
4709 else:
4710 s = first_bytes.decode('utf-8', 'replace')
4711
4712 return re.match(r'^\s*<', s)
a055469f
PH
4713
4714
4715def determine_protocol(info_dict):
4716 protocol = info_dict.get('protocol')
4717 if protocol is not None:
4718 return protocol
4719
4720 url = info_dict['url']
4721 if url.startswith('rtmp'):
4722 return 'rtmp'
4723 elif url.startswith('mms'):
4724 return 'mms'
4725 elif url.startswith('rtsp'):
4726 return 'rtsp'
4727
4728 ext = determine_ext(url)
4729 if ext == 'm3u8':
4730 return 'm3u8'
4731 elif ext == 'f4m':
4732 return 'f4m'
4733
4734 return compat_urllib_parse_urlparse(url).scheme
cfb56d1a
PH
4735
4736
76d321f6 4737def render_table(header_row, data, delim=False, extraGap=0, hideEmpty=False):
cfb56d1a 4738 """ Render a list of rows, each as a list of values """
76d321f6 4739
4740 def get_max_lens(table):
4741 return [max(len(compat_str(v)) for v in col) for col in zip(*table)]
4742
4743 def filter_using_list(row, filterArray):
4744 return [col for (take, col) in zip(filterArray, row) if take]
4745
4746 if hideEmpty:
4747 max_lens = get_max_lens(data)
4748 header_row = filter_using_list(header_row, max_lens)
4749 data = [filter_using_list(row, max_lens) for row in data]
4750
cfb56d1a 4751 table = [header_row] + data
76d321f6 4752 max_lens = get_max_lens(table)
4753 if delim:
4754 table = [header_row] + [['-' * ml for ml in max_lens]] + data
4755 format_str = ' '.join('%-' + compat_str(ml + extraGap) + 's' for ml in max_lens[:-1]) + ' %s'
cfb56d1a 4756 return '\n'.join(format_str % tuple(row) for row in table)
347de493
PH
4757
4758
8f18aca8 4759def _match_one(filter_part, dct, incomplete):
77b87f05 4760 # TODO: Generalize code with YoutubeDL._build_format_filter
a047eeb6 4761 STRING_OPERATORS = {
4762 '*=': operator.contains,
4763 '^=': lambda attr, value: attr.startswith(value),
4764 '$=': lambda attr, value: attr.endswith(value),
4765 '~=': lambda attr, value: re.search(value, attr),
4766 }
347de493 4767 COMPARISON_OPERATORS = {
a047eeb6 4768 **STRING_OPERATORS,
4769 '<=': operator.le, # "<=" must be defined above "<"
347de493 4770 '<': operator.lt,
347de493 4771 '>=': operator.ge,
a047eeb6 4772 '>': operator.gt,
347de493 4773 '=': operator.eq,
347de493 4774 }
a047eeb6 4775
347de493
PH
4776 operator_rex = re.compile(r'''(?x)\s*
4777 (?P<key>[a-z_]+)
77b87f05 4778 \s*(?P<negation>!\s*)?(?P<op>%s)(?P<none_inclusive>\s*\?)?\s*
347de493 4779 (?:
a047eeb6 4780 (?P<quote>["\'])(?P<quotedstrval>.+?)(?P=quote)|
4781 (?P<strval>.+?)
347de493
PH
4782 )
4783 \s*$
4784 ''' % '|'.join(map(re.escape, COMPARISON_OPERATORS.keys())))
4785 m = operator_rex.search(filter_part)
4786 if m:
18f96d12 4787 m = m.groupdict()
4788 unnegated_op = COMPARISON_OPERATORS[m['op']]
4789 if m['negation']:
77b87f05
MT
4790 op = lambda attr, value: not unnegated_op(attr, value)
4791 else:
4792 op = unnegated_op
18f96d12 4793 comparison_value = m['quotedstrval'] or m['strval'] or m['intval']
4794 if m['quote']:
4795 comparison_value = comparison_value.replace(r'\%s' % m['quote'], m['quote'])
4796 actual_value = dct.get(m['key'])
4797 numeric_comparison = None
4798 if isinstance(actual_value, compat_numeric_types):
e5a088dc
S
4799 # If the original field is a string and the matching comparison value is
4800 # a number, we should respect the origin of the original field
4801 # and process comparison value as a string (see
18f96d12 4802 # https://github.com/ytdl-org/youtube-dl/issues/11082)
347de493 4803 try:
18f96d12 4804 numeric_comparison = int(comparison_value)
347de493 4805 except ValueError:
18f96d12 4806 numeric_comparison = parse_filesize(comparison_value)
4807 if numeric_comparison is None:
4808 numeric_comparison = parse_filesize(f'{comparison_value}B')
4809 if numeric_comparison is None:
4810 numeric_comparison = parse_duration(comparison_value)
4811 if numeric_comparison is not None and m['op'] in STRING_OPERATORS:
4812 raise ValueError('Operator %s only supports string values!' % m['op'])
347de493 4813 if actual_value is None:
18f96d12 4814 return incomplete or m['none_inclusive']
4815 return op(actual_value, comparison_value if numeric_comparison is None else numeric_comparison)
347de493
PH
4816
4817 UNARY_OPERATORS = {
1cc47c66
S
4818 '': lambda v: (v is True) if isinstance(v, bool) else (v is not None),
4819 '!': lambda v: (v is False) if isinstance(v, bool) else (v is None),
347de493
PH
4820 }
4821 operator_rex = re.compile(r'''(?x)\s*
4822 (?P<op>%s)\s*(?P<key>[a-z_]+)
4823 \s*$
4824 ''' % '|'.join(map(re.escape, UNARY_OPERATORS.keys())))
4825 m = operator_rex.search(filter_part)
4826 if m:
4827 op = UNARY_OPERATORS[m.group('op')]
4828 actual_value = dct.get(m.group('key'))
8f18aca8 4829 if incomplete and actual_value is None:
4830 return True
347de493
PH
4831 return op(actual_value)
4832
4833 raise ValueError('Invalid filter part %r' % filter_part)
4834
4835
8f18aca8 4836def match_str(filter_str, dct, incomplete=False):
4837 """ Filter a dictionary with a simple string syntax. Returns True (=passes filter) or false
4838 When incomplete, all conditions passes on missing fields
4839 """
347de493 4840 return all(
8f18aca8 4841 _match_one(filter_part.replace(r'\&', '&'), dct, incomplete)
a047eeb6 4842 for filter_part in re.split(r'(?<!\\)&', filter_str))
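# Example usage (illustrative): '&'-separated conditions must all hold.
#   match_str('like_count > 100 & duration < 600',
#             {'like_count': 500, 'duration': 300}) is True
#   match_str('title ~= (?i)live', {'title': 'LIVE stream'}) is True
#   match_str('is_live', {'is_live': False}) is False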
347de493
PH
4843
4844
4845def match_filter_func(filter_str):
8f18aca8 4846 def _match_func(info_dict, *args, **kwargs):
4847 if match_str(filter_str, info_dict, *args, **kwargs):
347de493
PH
4848 return None
4849 else:
4850 video_title = info_dict.get('title', info_dict.get('id', 'video'))
4851 return '%s does not pass filter %s, skipping ..' % (video_title, filter_str)
4852 return _match_func
91410c9b
PH
4853
4854
bf6427d2
YCH
4855def parse_dfxp_time_expr(time_expr):
4856 if not time_expr:
d631d5f9 4857 return
bf6427d2
YCH
4858
4859 mobj = re.match(r'^(?P<time_offset>\d+(?:\.\d+)?)s?$', time_expr)
4860 if mobj:
4861 return float(mobj.group('time_offset'))
4862
db2fe38b 4863 mobj = re.match(r'^(\d+):(\d\d):(\d\d(?:(?:\.|:)\d+)?)$', time_expr)
bf6427d2 4864 if mobj:
db2fe38b 4865 return 3600 * int(mobj.group(1)) + 60 * int(mobj.group(2)) + float(mobj.group(3).replace(':', '.'))
bf6427d2
YCH
4866
4867
c1c924ab 4868def srt_subtitles_timecode(seconds):
aa7785f8 4869 return '%02d:%02d:%02d,%03d' % timetuple_from_msec(seconds * 1000)
4870
4871
4872def ass_subtitles_timecode(seconds):
4873 time = timetuple_from_msec(seconds * 1000)
4874 return '%01d:%02d:%02d.%02d' % (*time[:-1], time.milliseconds / 10)
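# Example usage (illustrative; assumes timetuple_from_msec(), defined earlier in
# this module, splits milliseconds into hours/minutes/seconds/milliseconds):
#   parse_dfxp_time_expr('42.5s') == 42.5
#   parse_dfxp_time_expr('00:01:30.5') == 90.5
#   srt_subtitles_timecode(90.5) == '00:01:30,500'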
bf6427d2
YCH
4875
4876
4877def dfxp2srt(dfxp_data):
3869028f
YCH
4878 '''
4879 @param dfxp_data A bytes-like object containing DFXP data
4880 @returns A unicode object containing converted SRT data
4881 '''
5b995f71 4882 LEGACY_NAMESPACES = (
3869028f
YCH
4883 (b'http://www.w3.org/ns/ttml', [
4884 b'http://www.w3.org/2004/11/ttaf1',
4885 b'http://www.w3.org/2006/04/ttaf1',
4886 b'http://www.w3.org/2006/10/ttaf1',
5b995f71 4887 ]),
3869028f
YCH
4888 (b'http://www.w3.org/ns/ttml#styling', [
4889 b'http://www.w3.org/ns/ttml#style',
5b995f71
RA
4890 ]),
4891 )
4892
4893 SUPPORTED_STYLING = [
4894 'color',
4895 'fontFamily',
4896 'fontSize',
4897 'fontStyle',
4898 'fontWeight',
4899 'textDecoration'
4900 ]
4901
4e335771 4902 _x = functools.partial(xpath_with_ns, ns_map={
261f4730 4903 'xml': 'http://www.w3.org/XML/1998/namespace',
4e335771 4904 'ttml': 'http://www.w3.org/ns/ttml',
5b995f71 4905 'tts': 'http://www.w3.org/ns/ttml#styling',
4e335771 4906 })
bf6427d2 4907
5b995f71
RA
4908 styles = {}
4909 default_style = {}
4910
87de7069 4911 class TTMLPElementParser(object):
5b995f71
RA
4912 _out = ''
4913 _unclosed_elements = []
4914 _applied_styles = []
bf6427d2 4915
2b14cb56 4916 def start(self, tag, attrib):
5b995f71
RA
4917 if tag in (_x('ttml:br'), 'br'):
4918 self._out += '\n'
4919 else:
4920 unclosed_elements = []
4921 style = {}
4922 element_style_id = attrib.get('style')
4923 if default_style:
4924 style.update(default_style)
4925 if element_style_id:
4926 style.update(styles.get(element_style_id, {}))
4927 for prop in SUPPORTED_STYLING:
4928 prop_val = attrib.get(_x('tts:' + prop))
4929 if prop_val:
4930 style[prop] = prop_val
4931 if style:
4932 font = ''
4933 for k, v in sorted(style.items()):
4934 if self._applied_styles and self._applied_styles[-1].get(k) == v:
4935 continue
4936 if k == 'color':
4937 font += ' color="%s"' % v
4938 elif k == 'fontSize':
4939 font += ' size="%s"' % v
4940 elif k == 'fontFamily':
4941 font += ' face="%s"' % v
4942 elif k == 'fontWeight' and v == 'bold':
4943 self._out += '<b>'
4944 unclosed_elements.append('b')
4945 elif k == 'fontStyle' and v == 'italic':
4946 self._out += '<i>'
4947 unclosed_elements.append('i')
4948 elif k == 'textDecoration' and v == 'underline':
4949 self._out += '<u>'
4950 unclosed_elements.append('u')
4951 if font:
4952 self._out += '<font' + font + '>'
4953 unclosed_elements.append('font')
4954 applied_style = {}
4955 if self._applied_styles:
4956 applied_style.update(self._applied_styles[-1])
4957 applied_style.update(style)
4958 self._applied_styles.append(applied_style)
4959 self._unclosed_elements.append(unclosed_elements)
bf6427d2 4960
2b14cb56 4961 def end(self, tag):
5b995f71
RA
4962 if tag not in (_x('ttml:br'), 'br'):
4963 unclosed_elements = self._unclosed_elements.pop()
4964 for element in reversed(unclosed_elements):
4965 self._out += '</%s>' % element
4966 if unclosed_elements and self._applied_styles:
4967 self._applied_styles.pop()
bf6427d2 4968
2b14cb56 4969 def data(self, data):
5b995f71 4970 self._out += data
2b14cb56 4971
4972 def close(self):
5b995f71 4973 return self._out.strip()
2b14cb56 4974
4975 def parse_node(node):
4976 target = TTMLPElementParser()
4977 parser = xml.etree.ElementTree.XMLParser(target=target)
4978 parser.feed(xml.etree.ElementTree.tostring(node))
4979 return parser.close()
bf6427d2 4980
5b995f71
RA
4981 for k, v in LEGACY_NAMESPACES:
4982 for ns in v:
4983 dfxp_data = dfxp_data.replace(ns, k)
4984
3869028f 4985 dfxp = compat_etree_fromstring(dfxp_data)
bf6427d2 4986 out = []
5b995f71 4987 paras = dfxp.findall(_x('.//ttml:p')) or dfxp.findall('.//p')
1b0427e6
YCH
4988
4989 if not paras:
4990 raise ValueError('Invalid dfxp/TTML subtitle')
bf6427d2 4991
5b995f71
RA
4992 repeat = False
4993 while True:
4994 for style in dfxp.findall(_x('.//ttml:style')):
261f4730
RA
4995 style_id = style.get('id') or style.get(_x('xml:id'))
4996 if not style_id:
4997 continue
5b995f71
RA
4998 parent_style_id = style.get('style')
4999 if parent_style_id:
5000 if parent_style_id not in styles:
5001 repeat = True
5002 continue
5003 styles[style_id] = styles[parent_style_id].copy()
5004 for prop in SUPPORTED_STYLING:
5005 prop_val = style.get(_x('tts:' + prop))
5006 if prop_val:
5007 styles.setdefault(style_id, {})[prop] = prop_val
5008 if repeat:
5009 repeat = False
5010 else:
5011 break
5012
5013 for p in ('body', 'div'):
5014 ele = xpath_element(dfxp, [_x('.//ttml:' + p), './/' + p])
5015 if ele is None:
5016 continue
5017 style = styles.get(ele.get('style'))
5018 if not style:
5019 continue
5020 default_style.update(style)
5021
bf6427d2 5022 for para, index in zip(paras, itertools.count(1)):
d631d5f9 5023 begin_time = parse_dfxp_time_expr(para.attrib.get('begin'))
7dff0363 5024 end_time = parse_dfxp_time_expr(para.attrib.get('end'))
d631d5f9
YCH
5025 dur = parse_dfxp_time_expr(para.attrib.get('dur'))
5026 if begin_time is None:
5027 continue
7dff0363 5028 if not end_time:
d631d5f9
YCH
5029 if not dur:
5030 continue
5031 end_time = begin_time + dur
bf6427d2
YCH
5032 out.append('%d\n%s --> %s\n%s\n\n' % (
5033 index,
c1c924ab
YCH
5034 srt_subtitles_timecode(begin_time),
5035 srt_subtitles_timecode(end_time),
bf6427d2
YCH
5036 parse_node(para)))
5037
5038 return ''.join(out)
5039
5040
66e289ba
S
5041def cli_option(params, command_option, param):
5042 param = params.get(param)
98e698f1
RA
5043 if param:
5044 param = compat_str(param)
66e289ba
S
5045 return [command_option, param] if param is not None else []
5046
5047
5048def cli_bool_option(params, command_option, param, true_value='true', false_value='false', separator=None):
5049 param = params.get(param)
5b232f46
S
5050 if param is None:
5051 return []
66e289ba
S
5052 assert isinstance(param, bool)
5053 if separator:
5054 return [command_option + separator + (true_value if param else false_value)]
5055 return [command_option, true_value if param else false_value]
5056
5057
5058def cli_valueless_option(params, command_option, param, expected_value=True):
5059 param = params.get(param)
5060 return [command_option] if param == expected_value else []
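# Example usage (illustrative; the option and parameter names are hypothetical):
#   cli_option({'proxy': 'socks5://127.0.0.1:1080'}, '--proxy', 'proxy')
#       == ['--proxy', 'socks5://127.0.0.1:1080']
#   cli_bool_option({'check': True}, '--check', 'check') == ['--check', 'true']
#   cli_bool_option({'check': True}, '--ssl', 'check',
#                   true_value='on', false_value='off', separator='=') == ['--ssl=on']
#   cli_valueless_option({'quiet': True}, '--quiet', 'quiet') == ['--quiet']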
5061
5062
e92caff5 5063def cli_configuration_args(argdict, keys, default=[], use_compat=True):
eab9b2bc 5064 if isinstance(argdict, (list, tuple)): # for backward compatibility
e92caff5 5065 if use_compat:
5b1ecbb3 5066 return argdict
5067 else:
5068 argdict = None
eab9b2bc 5069 if argdict is None:
5b1ecbb3 5070 return default
eab9b2bc 5071 assert isinstance(argdict, dict)
5072
e92caff5 5073 assert isinstance(keys, (list, tuple))
5074 for key_list in keys:
e92caff5 5075 arg_list = list(filter(
5076 lambda x: x is not None,
6606817a 5077 [argdict.get(key.lower()) for key in variadic(key_list)]))
e92caff5 5078 if arg_list:
5079 return [arg for args in arg_list for arg in args]
5080 return default
66e289ba 5081
6251555f 5082
330690a2 5083def _configuration_args(main_key, argdict, exe, keys=None, default=[], use_compat=True):
5084 main_key, exe = main_key.lower(), exe.lower()
5085 root_key = exe if main_key == exe else f'{main_key}+{exe}'
5086 keys = [f'{root_key}{k}' for k in (keys or [''])]
5087 if root_key in keys:
5088 if main_key != exe:
5089 keys.append((main_key, exe))
5090 keys.append('default')
5091 else:
5092 use_compat = False
5093 return cli_configuration_args(argdict, keys, default, use_compat)
5094
66e289ba 5095
39672624
YCH
5096class ISO639Utils(object):
5097 # See http://www.loc.gov/standards/iso639-2/ISO-639-2_utf-8.txt
5098 _lang_map = {
5099 'aa': 'aar',
5100 'ab': 'abk',
5101 'ae': 'ave',
5102 'af': 'afr',
5103 'ak': 'aka',
5104 'am': 'amh',
5105 'an': 'arg',
5106 'ar': 'ara',
5107 'as': 'asm',
5108 'av': 'ava',
5109 'ay': 'aym',
5110 'az': 'aze',
5111 'ba': 'bak',
5112 'be': 'bel',
5113 'bg': 'bul',
5114 'bh': 'bih',
5115 'bi': 'bis',
5116 'bm': 'bam',
5117 'bn': 'ben',
5118 'bo': 'bod',
5119 'br': 'bre',
5120 'bs': 'bos',
5121 'ca': 'cat',
5122 'ce': 'che',
5123 'ch': 'cha',
5124 'co': 'cos',
5125 'cr': 'cre',
5126 'cs': 'ces',
5127 'cu': 'chu',
5128 'cv': 'chv',
5129 'cy': 'cym',
5130 'da': 'dan',
5131 'de': 'deu',
5132 'dv': 'div',
5133 'dz': 'dzo',
5134 'ee': 'ewe',
5135 'el': 'ell',
5136 'en': 'eng',
5137 'eo': 'epo',
5138 'es': 'spa',
5139 'et': 'est',
5140 'eu': 'eus',
5141 'fa': 'fas',
5142 'ff': 'ful',
5143 'fi': 'fin',
5144 'fj': 'fij',
5145 'fo': 'fao',
5146 'fr': 'fra',
5147 'fy': 'fry',
5148 'ga': 'gle',
5149 'gd': 'gla',
5150 'gl': 'glg',
5151 'gn': 'grn',
5152 'gu': 'guj',
5153 'gv': 'glv',
5154 'ha': 'hau',
5155 'he': 'heb',
b7acc835 5156 'iw': 'heb', # Replaced by he in 1989 revision
39672624
YCH
5157 'hi': 'hin',
5158 'ho': 'hmo',
5159 'hr': 'hrv',
5160 'ht': 'hat',
5161 'hu': 'hun',
5162 'hy': 'hye',
5163 'hz': 'her',
5164 'ia': 'ina',
5165 'id': 'ind',
b7acc835 5166 'in': 'ind', # Replaced by id in 1989 revision
39672624
YCH
5167 'ie': 'ile',
5168 'ig': 'ibo',
5169 'ii': 'iii',
5170 'ik': 'ipk',
5171 'io': 'ido',
5172 'is': 'isl',
5173 'it': 'ita',
5174 'iu': 'iku',
5175 'ja': 'jpn',
5176 'jv': 'jav',
5177 'ka': 'kat',
5178 'kg': 'kon',
5179 'ki': 'kik',
5180 'kj': 'kua',
5181 'kk': 'kaz',
5182 'kl': 'kal',
5183 'km': 'khm',
5184 'kn': 'kan',
5185 'ko': 'kor',
5186 'kr': 'kau',
5187 'ks': 'kas',
5188 'ku': 'kur',
5189 'kv': 'kom',
5190 'kw': 'cor',
5191 'ky': 'kir',
5192 'la': 'lat',
5193 'lb': 'ltz',
5194 'lg': 'lug',
5195 'li': 'lim',
5196 'ln': 'lin',
5197 'lo': 'lao',
5198 'lt': 'lit',
5199 'lu': 'lub',
5200 'lv': 'lav',
5201 'mg': 'mlg',
5202 'mh': 'mah',
5203 'mi': 'mri',
5204 'mk': 'mkd',
5205 'ml': 'mal',
5206 'mn': 'mon',
5207 'mr': 'mar',
5208 'ms': 'msa',
5209 'mt': 'mlt',
5210 'my': 'mya',
5211 'na': 'nau',
5212 'nb': 'nob',
5213 'nd': 'nde',
5214 'ne': 'nep',
5215 'ng': 'ndo',
5216 'nl': 'nld',
5217 'nn': 'nno',
5218 'no': 'nor',
5219 'nr': 'nbl',
5220 'nv': 'nav',
5221 'ny': 'nya',
5222 'oc': 'oci',
5223 'oj': 'oji',
5224 'om': 'orm',
5225 'or': 'ori',
5226 'os': 'oss',
5227 'pa': 'pan',
5228 'pi': 'pli',
5229 'pl': 'pol',
5230 'ps': 'pus',
5231 'pt': 'por',
5232 'qu': 'que',
5233 'rm': 'roh',
5234 'rn': 'run',
5235 'ro': 'ron',
5236 'ru': 'rus',
5237 'rw': 'kin',
5238 'sa': 'san',
5239 'sc': 'srd',
5240 'sd': 'snd',
5241 'se': 'sme',
5242 'sg': 'sag',
5243 'si': 'sin',
5244 'sk': 'slk',
5245 'sl': 'slv',
5246 'sm': 'smo',
5247 'sn': 'sna',
5248 'so': 'som',
5249 'sq': 'sqi',
5250 'sr': 'srp',
5251 'ss': 'ssw',
5252 'st': 'sot',
5253 'su': 'sun',
5254 'sv': 'swe',
5255 'sw': 'swa',
5256 'ta': 'tam',
5257 'te': 'tel',
5258 'tg': 'tgk',
5259 'th': 'tha',
5260 'ti': 'tir',
5261 'tk': 'tuk',
5262 'tl': 'tgl',
5263 'tn': 'tsn',
5264 'to': 'ton',
5265 'tr': 'tur',
5266 'ts': 'tso',
5267 'tt': 'tat',
5268 'tw': 'twi',
5269 'ty': 'tah',
5270 'ug': 'uig',
5271 'uk': 'ukr',
5272 'ur': 'urd',
5273 'uz': 'uzb',
5274 've': 'ven',
5275 'vi': 'vie',
5276 'vo': 'vol',
5277 'wa': 'wln',
5278 'wo': 'wol',
5279 'xh': 'xho',
5280 'yi': 'yid',
e9a50fba 5281 'ji': 'yid', # Replaced by yi in 1989 revision
39672624
YCH
5282 'yo': 'yor',
5283 'za': 'zha',
5284 'zh': 'zho',
5285 'zu': 'zul',
5286 }
5287
5288 @classmethod
5289 def short2long(cls, code):
5290 """Convert language code from ISO 639-1 to ISO 639-2/T"""
5291 return cls._lang_map.get(code[:2])
5292
5293 @classmethod
5294 def long2short(cls, code):
5295 """Convert language code from ISO 639-2/T to ISO 639-1"""
5296 for short_name, long_name in cls._lang_map.items():
5297 if long_name == code:
5298 return short_name
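# Example usage (illustrative):
#   ISO639Utils.short2long('en') == 'eng'
#   ISO639Utils.long2short('deu') == 'de'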
5299
5300
4eb10f66
YCH
5301class ISO3166Utils(object):
5302 # From http://data.okfn.org/data/core/country-list
5303 _country_map = {
5304 'AF': 'Afghanistan',
5305 'AX': 'Åland Islands',
5306 'AL': 'Albania',
5307 'DZ': 'Algeria',
5308 'AS': 'American Samoa',
5309 'AD': 'Andorra',
5310 'AO': 'Angola',
5311 'AI': 'Anguilla',
5312 'AQ': 'Antarctica',
5313 'AG': 'Antigua and Barbuda',
5314 'AR': 'Argentina',
5315 'AM': 'Armenia',
5316 'AW': 'Aruba',
5317 'AU': 'Australia',
5318 'AT': 'Austria',
5319 'AZ': 'Azerbaijan',
5320 'BS': 'Bahamas',
5321 'BH': 'Bahrain',
5322 'BD': 'Bangladesh',
5323 'BB': 'Barbados',
5324 'BY': 'Belarus',
5325 'BE': 'Belgium',
5326 'BZ': 'Belize',
5327 'BJ': 'Benin',
5328 'BM': 'Bermuda',
5329 'BT': 'Bhutan',
5330 'BO': 'Bolivia, Plurinational State of',
5331 'BQ': 'Bonaire, Sint Eustatius and Saba',
5332 'BA': 'Bosnia and Herzegovina',
5333 'BW': 'Botswana',
5334 'BV': 'Bouvet Island',
5335 'BR': 'Brazil',
5336 'IO': 'British Indian Ocean Territory',
5337 'BN': 'Brunei Darussalam',
5338 'BG': 'Bulgaria',
5339 'BF': 'Burkina Faso',
5340 'BI': 'Burundi',
5341 'KH': 'Cambodia',
5342 'CM': 'Cameroon',
5343 'CA': 'Canada',
5344 'CV': 'Cape Verde',
5345 'KY': 'Cayman Islands',
5346 'CF': 'Central African Republic',
5347 'TD': 'Chad',
5348 'CL': 'Chile',
5349 'CN': 'China',
5350 'CX': 'Christmas Island',
5351 'CC': 'Cocos (Keeling) Islands',
5352 'CO': 'Colombia',
5353 'KM': 'Comoros',
5354 'CG': 'Congo',
5355 'CD': 'Congo, the Democratic Republic of the',
5356 'CK': 'Cook Islands',
5357 'CR': 'Costa Rica',
5358 'CI': 'Côte d\'Ivoire',
5359 'HR': 'Croatia',
5360 'CU': 'Cuba',
5361 'CW': 'Curaçao',
5362 'CY': 'Cyprus',
5363 'CZ': 'Czech Republic',
5364 'DK': 'Denmark',
5365 'DJ': 'Djibouti',
5366 'DM': 'Dominica',
5367 'DO': 'Dominican Republic',
5368 'EC': 'Ecuador',
5369 'EG': 'Egypt',
5370 'SV': 'El Salvador',
5371 'GQ': 'Equatorial Guinea',
5372 'ER': 'Eritrea',
5373 'EE': 'Estonia',
5374 'ET': 'Ethiopia',
5375 'FK': 'Falkland Islands (Malvinas)',
5376 'FO': 'Faroe Islands',
5377 'FJ': 'Fiji',
5378 'FI': 'Finland',
5379 'FR': 'France',
5380 'GF': 'French Guiana',
5381 'PF': 'French Polynesia',
5382 'TF': 'French Southern Territories',
5383 'GA': 'Gabon',
5384 'GM': 'Gambia',
5385 'GE': 'Georgia',
5386 'DE': 'Germany',
5387 'GH': 'Ghana',
5388 'GI': 'Gibraltar',
5389 'GR': 'Greece',
5390 'GL': 'Greenland',
5391 'GD': 'Grenada',
5392 'GP': 'Guadeloupe',
5393 'GU': 'Guam',
5394 'GT': 'Guatemala',
5395 'GG': 'Guernsey',
5396 'GN': 'Guinea',
5397 'GW': 'Guinea-Bissau',
5398 'GY': 'Guyana',
5399 'HT': 'Haiti',
5400 'HM': 'Heard Island and McDonald Islands',
5401 'VA': 'Holy See (Vatican City State)',
5402 'HN': 'Honduras',
5403 'HK': 'Hong Kong',
5404 'HU': 'Hungary',
5405 'IS': 'Iceland',
5406 'IN': 'India',
5407 'ID': 'Indonesia',
5408 'IR': 'Iran, Islamic Republic of',
5409 'IQ': 'Iraq',
5410 'IE': 'Ireland',
5411 'IM': 'Isle of Man',
5412 'IL': 'Israel',
5413 'IT': 'Italy',
5414 'JM': 'Jamaica',
5415 'JP': 'Japan',
5416 'JE': 'Jersey',
5417 'JO': 'Jordan',
5418 'KZ': 'Kazakhstan',
5419 'KE': 'Kenya',
5420 'KI': 'Kiribati',
5421 'KP': 'Korea, Democratic People\'s Republic of',
5422 'KR': 'Korea, Republic of',
5423 'KW': 'Kuwait',
5424 'KG': 'Kyrgyzstan',
5425 'LA': 'Lao People\'s Democratic Republic',
5426 'LV': 'Latvia',
5427 'LB': 'Lebanon',
5428 'LS': 'Lesotho',
5429 'LR': 'Liberia',
5430 'LY': 'Libya',
5431 'LI': 'Liechtenstein',
5432 'LT': 'Lithuania',
5433 'LU': 'Luxembourg',
5434 'MO': 'Macao',
5435 'MK': 'Macedonia, the Former Yugoslav Republic of',
5436 'MG': 'Madagascar',
5437 'MW': 'Malawi',
5438 'MY': 'Malaysia',
5439 'MV': 'Maldives',
5440 'ML': 'Mali',
5441 'MT': 'Malta',
5442 'MH': 'Marshall Islands',
5443 'MQ': 'Martinique',
5444 'MR': 'Mauritania',
5445 'MU': 'Mauritius',
5446 'YT': 'Mayotte',
5447 'MX': 'Mexico',
5448 'FM': 'Micronesia, Federated States of',
5449 'MD': 'Moldova, Republic of',
5450 'MC': 'Monaco',
5451 'MN': 'Mongolia',
5452 'ME': 'Montenegro',
5453 'MS': 'Montserrat',
5454 'MA': 'Morocco',
5455 'MZ': 'Mozambique',
5456 'MM': 'Myanmar',
5457 'NA': 'Namibia',
5458 'NR': 'Nauru',
5459 'NP': 'Nepal',
5460 'NL': 'Netherlands',
5461 'NC': 'New Caledonia',
5462 'NZ': 'New Zealand',
5463 'NI': 'Nicaragua',
5464 'NE': 'Niger',
5465 'NG': 'Nigeria',
5466 'NU': 'Niue',
5467 'NF': 'Norfolk Island',
5468 'MP': 'Northern Mariana Islands',
5469 'NO': 'Norway',
5470 'OM': 'Oman',
5471 'PK': 'Pakistan',
5472 'PW': 'Palau',
5473 'PS': 'Palestine, State of',
5474 'PA': 'Panama',
5475 'PG': 'Papua New Guinea',
5476 'PY': 'Paraguay',
5477 'PE': 'Peru',
5478 'PH': 'Philippines',
5479 'PN': 'Pitcairn',
5480 'PL': 'Poland',
5481 'PT': 'Portugal',
5482 'PR': 'Puerto Rico',
5483 'QA': 'Qatar',
5484 'RE': 'Réunion',
5485 'RO': 'Romania',
5486 'RU': 'Russian Federation',
5487 'RW': 'Rwanda',
5488 'BL': 'Saint Barthélemy',
5489 'SH': 'Saint Helena, Ascension and Tristan da Cunha',
5490 'KN': 'Saint Kitts and Nevis',
5491 'LC': 'Saint Lucia',
5492 'MF': 'Saint Martin (French part)',
5493 'PM': 'Saint Pierre and Miquelon',
5494 'VC': 'Saint Vincent and the Grenadines',
5495 'WS': 'Samoa',
5496 'SM': 'San Marino',
5497 'ST': 'Sao Tome and Principe',
5498 'SA': 'Saudi Arabia',
5499 'SN': 'Senegal',
5500 'RS': 'Serbia',
5501 'SC': 'Seychelles',
5502 'SL': 'Sierra Leone',
5503 'SG': 'Singapore',
5504 'SX': 'Sint Maarten (Dutch part)',
5505 'SK': 'Slovakia',
5506 'SI': 'Slovenia',
5507 'SB': 'Solomon Islands',
5508 'SO': 'Somalia',
5509 'ZA': 'South Africa',
5510 'GS': 'South Georgia and the South Sandwich Islands',
5511 'SS': 'South Sudan',
5512 'ES': 'Spain',
5513 'LK': 'Sri Lanka',
5514 'SD': 'Sudan',
5515 'SR': 'Suriname',
5516 'SJ': 'Svalbard and Jan Mayen',
5517 'SZ': 'Swaziland',
5518 'SE': 'Sweden',
5519 'CH': 'Switzerland',
5520 'SY': 'Syrian Arab Republic',
5521 'TW': 'Taiwan, Province of China',
5522 'TJ': 'Tajikistan',
5523 'TZ': 'Tanzania, United Republic of',
5524 'TH': 'Thailand',
5525 'TL': 'Timor-Leste',
5526 'TG': 'Togo',
5527 'TK': 'Tokelau',
5528 'TO': 'Tonga',
5529 'TT': 'Trinidad and Tobago',
5530 'TN': 'Tunisia',
5531 'TR': 'Turkey',
5532 'TM': 'Turkmenistan',
5533 'TC': 'Turks and Caicos Islands',
5534 'TV': 'Tuvalu',
5535 'UG': 'Uganda',
5536 'UA': 'Ukraine',
5537 'AE': 'United Arab Emirates',
5538 'GB': 'United Kingdom',
5539 'US': 'United States',
5540 'UM': 'United States Minor Outlying Islands',
5541 'UY': 'Uruguay',
5542 'UZ': 'Uzbekistan',
5543 'VU': 'Vanuatu',
5544 'VE': 'Venezuela, Bolivarian Republic of',
5545 'VN': 'Viet Nam',
5546 'VG': 'Virgin Islands, British',
5547 'VI': 'Virgin Islands, U.S.',
5548 'WF': 'Wallis and Futuna',
5549 'EH': 'Western Sahara',
5550 'YE': 'Yemen',
5551 'ZM': 'Zambia',
5552 'ZW': 'Zimbabwe',
5553 }
5554
5555 @classmethod
5556 def short2full(cls, code):
5557 """Convert an ISO 3166-2 country code to the corresponding full name"""
5558 return cls._country_map.get(code.upper())
5559
5560
773f291d
S
5561class GeoUtils(object):
5562 # Major IPv4 address blocks per country
5563 _country_ip_map = {
53896ca5 5564 'AD': '46.172.224.0/19',
773f291d
S
5565 'AE': '94.200.0.0/13',
5566 'AF': '149.54.0.0/17',
5567 'AG': '209.59.64.0/18',
5568 'AI': '204.14.248.0/21',
5569 'AL': '46.99.0.0/16',
5570 'AM': '46.70.0.0/15',
5571 'AO': '105.168.0.0/13',
53896ca5
S
5572 'AP': '182.50.184.0/21',
5573 'AQ': '23.154.160.0/24',
773f291d
S
5574 'AR': '181.0.0.0/12',
5575 'AS': '202.70.112.0/20',
53896ca5 5576 'AT': '77.116.0.0/14',
773f291d
S
5577 'AU': '1.128.0.0/11',
5578 'AW': '181.41.0.0/18',
53896ca5
S
5579 'AX': '185.217.4.0/22',
5580 'AZ': '5.197.0.0/16',
773f291d
S
5581 'BA': '31.176.128.0/17',
5582 'BB': '65.48.128.0/17',
5583 'BD': '114.130.0.0/16',
5584 'BE': '57.0.0.0/8',
53896ca5 5585 'BF': '102.178.0.0/15',
773f291d
S
5586 'BG': '95.42.0.0/15',
5587 'BH': '37.131.0.0/17',
5588 'BI': '154.117.192.0/18',
5589 'BJ': '137.255.0.0/16',
53896ca5 5590 'BL': '185.212.72.0/23',
773f291d
S
5591 'BM': '196.12.64.0/18',
5592 'BN': '156.31.0.0/16',
5593 'BO': '161.56.0.0/16',
5594 'BQ': '161.0.80.0/20',
53896ca5 5595 'BR': '191.128.0.0/12',
773f291d
S
5596 'BS': '24.51.64.0/18',
5597 'BT': '119.2.96.0/19',
5598 'BW': '168.167.0.0/16',
5599 'BY': '178.120.0.0/13',
5600 'BZ': '179.42.192.0/18',
5601 'CA': '99.224.0.0/11',
5602 'CD': '41.243.0.0/16',
53896ca5
S
5603 'CF': '197.242.176.0/21',
5604 'CG': '160.113.0.0/16',
773f291d 5605 'CH': '85.0.0.0/13',
53896ca5 5606 'CI': '102.136.0.0/14',
773f291d
S
5607 'CK': '202.65.32.0/19',
5608 'CL': '152.172.0.0/14',
53896ca5 5609 'CM': '102.244.0.0/14',
773f291d
S
5610 'CN': '36.128.0.0/10',
5611 'CO': '181.240.0.0/12',
5612 'CR': '201.192.0.0/12',
5613 'CU': '152.206.0.0/15',
5614 'CV': '165.90.96.0/19',
5615 'CW': '190.88.128.0/17',
53896ca5 5616 'CY': '31.153.0.0/16',
773f291d
S
5617 'CZ': '88.100.0.0/14',
5618 'DE': '53.0.0.0/8',
5619 'DJ': '197.241.0.0/17',
5620 'DK': '87.48.0.0/12',
5621 'DM': '192.243.48.0/20',
5622 'DO': '152.166.0.0/15',
5623 'DZ': '41.96.0.0/12',
5624 'EC': '186.68.0.0/15',
5625 'EE': '90.190.0.0/15',
5626 'EG': '156.160.0.0/11',
5627 'ER': '196.200.96.0/20',
5628 'ES': '88.0.0.0/11',
5629 'ET': '196.188.0.0/14',
5630 'EU': '2.16.0.0/13',
5631 'FI': '91.152.0.0/13',
5632 'FJ': '144.120.0.0/16',
53896ca5 5633 'FK': '80.73.208.0/21',
773f291d
S
5634 'FM': '119.252.112.0/20',
5635 'FO': '88.85.32.0/19',
5636 'FR': '90.0.0.0/9',
5637 'GA': '41.158.0.0/15',
5638 'GB': '25.0.0.0/8',
5639 'GD': '74.122.88.0/21',
5640 'GE': '31.146.0.0/16',
5641 'GF': '161.22.64.0/18',
5642 'GG': '62.68.160.0/19',
53896ca5
S
5643 'GH': '154.160.0.0/12',
5644 'GI': '95.164.0.0/16',
773f291d
S
5645 'GL': '88.83.0.0/19',
5646 'GM': '160.182.0.0/15',
5647 'GN': '197.149.192.0/18',
5648 'GP': '104.250.0.0/19',
5649 'GQ': '105.235.224.0/20',
5650 'GR': '94.64.0.0/13',
5651 'GT': '168.234.0.0/16',
5652 'GU': '168.123.0.0/16',
5653 'GW': '197.214.80.0/20',
5654 'GY': '181.41.64.0/18',
5655 'HK': '113.252.0.0/14',
5656 'HN': '181.210.0.0/16',
5657 'HR': '93.136.0.0/13',
5658 'HT': '148.102.128.0/17',
5659 'HU': '84.0.0.0/14',
5660 'ID': '39.192.0.0/10',
5661 'IE': '87.32.0.0/12',
5662 'IL': '79.176.0.0/13',
5663 'IM': '5.62.80.0/20',
5664 'IN': '117.192.0.0/10',
5665 'IO': '203.83.48.0/21',
5666 'IQ': '37.236.0.0/14',
5667 'IR': '2.176.0.0/12',
5668 'IS': '82.221.0.0/16',
5669 'IT': '79.0.0.0/10',
5670 'JE': '87.244.64.0/18',
5671 'JM': '72.27.0.0/17',
5672 'JO': '176.29.0.0/16',
53896ca5 5673 'JP': '133.0.0.0/8',
773f291d
S
5674 'KE': '105.48.0.0/12',
5675 'KG': '158.181.128.0/17',
5676 'KH': '36.37.128.0/17',
5677 'KI': '103.25.140.0/22',
5678 'KM': '197.255.224.0/20',
53896ca5 5679 'KN': '198.167.192.0/19',
773f291d
S
5680 'KP': '175.45.176.0/22',
5681 'KR': '175.192.0.0/10',
5682 'KW': '37.36.0.0/14',
5683 'KY': '64.96.0.0/15',
5684 'KZ': '2.72.0.0/13',
5685 'LA': '115.84.64.0/18',
5686 'LB': '178.135.0.0/16',
53896ca5 5687 'LC': '24.92.144.0/20',
773f291d
S
5688 'LI': '82.117.0.0/19',
5689 'LK': '112.134.0.0/15',
53896ca5 5690 'LR': '102.183.0.0/16',
773f291d
S
5691 'LS': '129.232.0.0/17',
5692 'LT': '78.56.0.0/13',
5693 'LU': '188.42.0.0/16',
5694 'LV': '46.109.0.0/16',
5695 'LY': '41.252.0.0/14',
5696 'MA': '105.128.0.0/11',
5697 'MC': '88.209.64.0/18',
5698 'MD': '37.246.0.0/16',
5699 'ME': '178.175.0.0/17',
5700 'MF': '74.112.232.0/21',
5701 'MG': '154.126.0.0/17',
5702 'MH': '117.103.88.0/21',
5703 'MK': '77.28.0.0/15',
5704 'ML': '154.118.128.0/18',
5705 'MM': '37.111.0.0/17',
5706 'MN': '49.0.128.0/17',
5707 'MO': '60.246.0.0/16',
5708 'MP': '202.88.64.0/20',
5709 'MQ': '109.203.224.0/19',
5710 'MR': '41.188.64.0/18',
5711 'MS': '208.90.112.0/22',
5712 'MT': '46.11.0.0/16',
5713 'MU': '105.16.0.0/12',
5714 'MV': '27.114.128.0/18',
53896ca5 5715 'MW': '102.70.0.0/15',
773f291d
S
5716 'MX': '187.192.0.0/11',
5717 'MY': '175.136.0.0/13',
5718 'MZ': '197.218.0.0/15',
5719 'NA': '41.182.0.0/16',
5720 'NC': '101.101.0.0/18',
5721 'NE': '197.214.0.0/18',
5722 'NF': '203.17.240.0/22',
5723 'NG': '105.112.0.0/12',
5724 'NI': '186.76.0.0/15',
5725 'NL': '145.96.0.0/11',
5726 'NO': '84.208.0.0/13',
5727 'NP': '36.252.0.0/15',
5728 'NR': '203.98.224.0/19',
5729 'NU': '49.156.48.0/22',
5730 'NZ': '49.224.0.0/14',
5731 'OM': '5.36.0.0/15',
5732 'PA': '186.72.0.0/15',
5733 'PE': '186.160.0.0/14',
5734 'PF': '123.50.64.0/18',
5735 'PG': '124.240.192.0/19',
5736 'PH': '49.144.0.0/13',
5737 'PK': '39.32.0.0/11',
5738 'PL': '83.0.0.0/11',
5739 'PM': '70.36.0.0/20',
5740 'PR': '66.50.0.0/16',
5741 'PS': '188.161.0.0/16',
5742 'PT': '85.240.0.0/13',
5743 'PW': '202.124.224.0/20',
5744 'PY': '181.120.0.0/14',
5745 'QA': '37.210.0.0/15',
53896ca5 5746 'RE': '102.35.0.0/16',
773f291d 5747 'RO': '79.112.0.0/13',
53896ca5 5748 'RS': '93.86.0.0/15',
773f291d 5749 'RU': '5.136.0.0/13',
53896ca5 5750 'RW': '41.186.0.0/16',
5751 'SA': '188.48.0.0/13',
5752 'SB': '202.1.160.0/19',
5753 'SC': '154.192.0.0/11',
53896ca5 5754 'SD': '102.120.0.0/13',
773f291d 5755 'SE': '78.64.0.0/12',
53896ca5 5756 'SG': '8.128.0.0/10',
5757 'SI': '188.196.0.0/14',
5758 'SK': '78.98.0.0/15',
53896ca5 5759 'SL': '102.143.0.0/17',
5760 'SM': '89.186.32.0/19',
5761 'SN': '41.82.0.0/15',
53896ca5 5762 'SO': '154.115.192.0/18',
5763 'SR': '186.179.128.0/17',
5764 'SS': '105.235.208.0/21',
5765 'ST': '197.159.160.0/19',
5766 'SV': '168.243.0.0/16',
5767 'SX': '190.102.0.0/20',
5768 'SY': '5.0.0.0/16',
5769 'SZ': '41.84.224.0/19',
5770 'TC': '65.255.48.0/20',
5771 'TD': '154.68.128.0/19',
5772 'TG': '196.168.0.0/14',
5773 'TH': '171.96.0.0/13',
5774 'TJ': '85.9.128.0/18',
5775 'TK': '27.96.24.0/21',
5776 'TL': '180.189.160.0/20',
5777 'TM': '95.85.96.0/19',
5778 'TN': '197.0.0.0/11',
5779 'TO': '175.176.144.0/21',
5780 'TR': '78.160.0.0/11',
5781 'TT': '186.44.0.0/15',
5782 'TV': '202.2.96.0/19',
5783 'TW': '120.96.0.0/11',
5784 'TZ': '156.156.0.0/14',
5785 'UA': '37.52.0.0/14',
5786 'UG': '102.80.0.0/13',
5787 'US': '6.0.0.0/8',
773f291d 5788 'UY': '167.56.0.0/13',
53896ca5 5789 'UZ': '84.54.64.0/18',
773f291d 5790 'VA': '212.77.0.0/19',
53896ca5 5791 'VC': '207.191.240.0/21',
773f291d 5792 'VE': '186.88.0.0/13',
53896ca5 5793 'VG': '66.81.192.0/20',
5794 'VI': '146.226.0.0/16',
5795 'VN': '14.160.0.0/11',
5796 'VU': '202.80.32.0/20',
5797 'WF': '117.20.32.0/21',
5798 'WS': '202.4.32.0/19',
5799 'YE': '134.35.0.0/16',
5800 'YT': '41.242.116.0/22',
5801 'ZA': '41.0.0.0/11',
5802 'ZM': '102.144.0.0/13',
5803 'ZW': '102.177.192.0/18',
5804 }
5805
5806 @classmethod
5807 def random_ipv4(cls, code_or_block):
5808 if len(code_or_block) == 2:
5809 block = cls._country_ip_map.get(code_or_block.upper())
5810 if not block:
5811 return None
5812 else:
5813 block = code_or_block
5814 addr, preflen = block.split('/')
5815 addr_min = compat_struct_unpack('!L', socket.inet_aton(addr))[0]
5816 addr_max = addr_min | (0xffffffff >> int(preflen))
18a0defa 5817 return compat_str(socket.inet_ntoa(
4248dad9 5818 compat_struct_pack('!L', random.randint(addr_min, addr_max))))
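# A minimal usage sketch: random_ipv4() accepts either a two-letter country code
# (looked up in _country_ip_map above) or an explicit CIDR block. The class name
# GeoUtils is assumed here to be the enclosing class; the wrapper is illustrative only.
def _example_random_ipv4():
    print(GeoUtils.random_ipv4('DE'))               # some address inside '53.0.0.0/8'
    print(GeoUtils.random_ipv4('198.51.100.0/24'))  # some address inside the given block
    print(GeoUtils.random_ipv4('ZZ'))               # None for unknown country codes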
5819
5820
91410c9b 5821class PerRequestProxyHandler(compat_urllib_request.ProxyHandler):
5822 def __init__(self, proxies=None):
5823 # Set default handlers
5824 for type in ('http', 'https'):
5825 setattr(self, '%s_open' % type,
5826 lambda r, proxy='__noproxy__', type=type, meth=self.proxy_open:
5827 meth(r, proxy, type))
38e87f6c 5828 compat_urllib_request.ProxyHandler.__init__(self, proxies)
2461f79d 5829
91410c9b 5830 def proxy_open(self, req, proxy, type):
2461f79d 5831 req_proxy = req.headers.get('Ytdl-request-proxy')
5832 if req_proxy is not None:
5833 proxy = req_proxy
5834 del req.headers['Ytdl-request-proxy']
5835
5836 if proxy == '__noproxy__':
5837 return None # No Proxy
51fb4995 5838 if compat_urlparse.urlparse(proxy).scheme.lower() in ('socks', 'socks4', 'socks4a', 'socks5'):
71aff188 5839 req.add_header('Ytdl-socks-proxy', proxy)
7a5c1cfe 5840 # yt-dlp's http/https handlers wrap the socket with SOCKS themselves
71aff188 5841 return None
5842 return compat_urllib_request.ProxyHandler.proxy_open(
5843 self, req, proxy, type)
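# A minimal usage sketch: the handler applies its default proxies unless a request
# carries the Ytdl-request-proxy header, which may name a different proxy or
# '__noproxy__' to bypass proxying for that request. The helper below is illustrative only.
def _example_per_request_proxy():
    handler = PerRequestProxyHandler({'http': 'http://127.0.0.1:3128', 'https': 'http://127.0.0.1:3128'})
    opener = compat_urllib_request.build_opener(handler)
    req = compat_urllib_request.Request('http://example.com/')
    req.add_header('Ytdl-request-proxy', '__noproxy__')  # this request bypasses the default proxy
    return opener, req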
5844
5845
5846# Both long_to_bytes and bytes_to_long are adapted from PyCrypto, which is
5847# released into Public Domain
5848# https://github.com/dlitz/pycrypto/blob/master/lib/Crypto/Util/number.py#L387
5849
5850def long_to_bytes(n, blocksize=0):
5851 """long_to_bytes(n:long, blocksize:int) : string
5852 Convert a long integer to a byte string.
5853
5854 If optional blocksize is given and greater than zero, pad the front of the
5855 byte string with binary zeros so that the length is a multiple of
5856 blocksize.
5857 """
5858 # after much testing, this algorithm was deemed to be the fastest
5859 s = b''
5860 n = int(n)
5861 while n > 0:
5862 s = compat_struct_pack('>I', n & 0xffffffff) + s
5863 n = n >> 32
5864 # strip off leading zeros
5865 for i in range(len(s)):
5866 if s[i] != b'\000'[0]:
5867 break
5868 else:
5869 # only happens when n == 0
5870 s = b'\000'
5871 i = 0
5872 s = s[i:]
5873 # add back some pad bytes. this could be done more efficiently w.r.t. the
5874 # de-padding being done above, but sigh...
5875 if blocksize > 0 and len(s) % blocksize:
5876 s = (blocksize - len(s) % blocksize) * b'\000' + s
5877 return s
5878
5879
5880def bytes_to_long(s):
5881 """bytes_to_long(string) : long
5882 Convert a byte string to a long integer.
5883
5884 This is (essentially) the inverse of long_to_bytes().
5885 """
5886 acc = 0
5887 length = len(s)
5888 if length % 4:
5889 extra = (4 - length % 4)
5890 s = b'\000' * extra + s
5891 length = length + extra
5892 for i in range(0, length, 4):
5893 acc = (acc << 32) + compat_struct_unpack('>I', s[i:i + 4])[0]
5894 return acc
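# A minimal sketch of the round-trip behaviour: the two helpers above are inverses,
# and blocksize only left-pads the result with NUL bytes.
def _example_long_bytes_roundtrip():
    assert long_to_bytes(0x48656c6c6f) == b'Hello'
    assert bytes_to_long(b'Hello') == 0x48656c6c6f
    assert long_to_bytes(0x48656c6c6f, blocksize=8) == b'\x00\x00\x00Hello'
    assert long_to_bytes(0) == b'\x00'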
5895
5896
5897def ohdave_rsa_encrypt(data, exponent, modulus):
5898 '''
5899 Implement OHDave's RSA algorithm. See http://www.ohdave.com/rsa/
5900
5901 Input:
5902 data: data to encrypt, bytes-like object
5903 exponent, modulus: parameter e and N of RSA algorithm, both integer
5904 Output: hex string of encrypted data
5905
5906 Limitation: supports one block encryption only
5907 '''
5908
5909 payload = int(binascii.hexlify(data[::-1]), 16)
5910 encrypted = pow(payload, exponent, modulus)
5911 return '%x' % encrypted
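# A minimal sketch with toy (insecure) RSA parameters: the data is read little-endian,
# so b'\x02' is the integer 2, and pow(2, 3, 101) == 8 comes back as the hex string '8'.
def _example_ohdave_rsa_encrypt():
    assert ohdave_rsa_encrypt(b'\x02', exponent=3, modulus=101) == '8'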
5912
5913
5914def pkcs1pad(data, length):
5915 """
5916 Padding input data with PKCS#1 scheme
5917
5918 @param {int[]} data input data
5919 @param {int} length target length
5920 @returns {int[]} padded data
5921 """
5922 if len(data) > length - 11:
5923 raise ValueError('Input data too long for PKCS#1 padding')
5924
5925 pseudo_random = [random.randint(0, 254) for _ in range(length - len(data) - 3)]
5926 return [0, 2] + pseudo_random + [0] + data
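# A minimal sketch of the padding layout: the result is exactly `length` ints,
# arranged as [0, 2, <random filler>, 0, <data>].
def _example_pkcs1pad():
    padded = pkcs1pad([0x41, 0x42], 16)
    assert len(padded) == 16
    assert padded[:2] == [0, 2] and padded[-3:] == [0, 0x41, 0x42]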
5927
5928
5eb6bdce 5929def encode_base_n(num, n, table=None):
59f898b7 5930 FULL_TABLE = '0123456789abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ'
5931 if not table:
5932 table = FULL_TABLE[:n]
5933
5934 if n > len(table):
5935 raise ValueError('base %d exceeds table length %d' % (n, len(table)))
5936
5937 if num == 0:
5938 return table[0]
5939
5940 ret = ''
5941 while num:
5942 ret = table[num % n] + ret
5943 num = num // n
5944 return ret
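# A minimal sketch: with no explicit table, the first n characters of the
# 0-9a-zA-Z alphabet are used, so n may be at most 62.
def _example_encode_base_n():
    assert encode_base_n(0, 16) == '0'
    assert encode_base_n(255, 16) == 'ff'
    assert encode_base_n(61, 62) == 'Z'
    assert encode_base_n(255, 2, table='01') == '11111111'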
5945
5946
5947def decode_packed_codes(code):
06b3fe29 5948 mobj = re.search(PACKED_CODES_RE, code)
a0566bbf 5949 obfuscated_code, base, count, symbols = mobj.groups()
5950 base = int(base)
5951 count = int(count)
5952 symbols = symbols.split('|')
5953 symbol_table = {}
5954
5955 while count:
5956 count -= 1
5eb6bdce 5957 base_n_count = encode_base_n(count, base)
5958 symbol_table[base_n_count] = symbols[count] or base_n_count
5959
5960 return re.sub(
5961 r'\b(\w+)\b', lambda mobj: symbol_table[mobj.group(0)],
a0566bbf 5962 obfuscated_code)
e154c651 5963
5964
5965def caesar(s, alphabet, shift):
5966 if shift == 0:
5967 return s
5968 l = len(alphabet)
5969 return ''.join(
5970 alphabet[(alphabet.index(c) + shift) % l] if c in alphabet else c
5971 for c in s)
5972
5973
5974def rot47(s):
5975 return caesar(s, r'''!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~''', 47)
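# A minimal sketch: caesar() only rotates characters that appear in the given
# alphabet, and rot47 (shift 47 over 94 printable characters) is its own inverse.
def _example_caesar_rot47():
    assert caesar('abc!', 'abcdefghijklmnopqrstuvwxyz', 3) == 'def!'
    assert rot47('Hello') == 'w6==@'
    assert rot47(rot47('Hello')) == 'Hello'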
5976
5977
e154c651 5978def parse_m3u8_attributes(attrib):
5979 info = {}
5980 for (key, val) in re.findall(r'(?P<key>[A-Z0-9-]+)=(?P<val>"[^"]+"|[^",]+)(?:,|$)', attrib):
5981 if val.startswith('"'):
5982 val = val[1:-1]
5983 info[key] = val
5984 return info
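# A minimal sketch: quoted attribute values keep any commas they contain, while
# unquoted values end at the next comma.
def _example_parse_m3u8_attributes():
    attrs = parse_m3u8_attributes('BANDWIDTH=1280000,CODECS="avc1.4d401f,mp4a.40.2",RESOLUTION=640x360')
    assert attrs == {
        'BANDWIDTH': '1280000',
        'CODECS': 'avc1.4d401f,mp4a.40.2',
        'RESOLUTION': '640x360',
    }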
5985
5986
5987def urshift(val, n):
5988 return val >> n if val >= 0 else (val + 0x100000000) >> n
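# A minimal sketch: urshift() emulates JavaScript's unsigned right shift (>>>)
# for 32-bit values, so negative inputs are treated as their unsigned equivalent.
def _example_urshift():
    assert urshift(16, 2) == 4
    assert urshift(-1, 8) == 0x00ffffff  # -1 is treated as 0xffffffff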
5989
5990
5991# Based on png2str() written by @gdkchan and improved by @yokrysty
067aa17e 5992# Originally posted at https://github.com/ytdl-org/youtube-dl/issues/9706
5993def decode_png(png_data):
5994 # Reference: https://www.w3.org/TR/PNG/
5995 header = png_data[8:]
5996
5997 if png_data[:8] != b'\x89PNG\x0d\x0a\x1a\x0a' or header[4:8] != b'IHDR':
5998 raise IOError('Not a valid PNG file.')
5999
6000 int_map = {1: '>B', 2: '>H', 4: '>I'}
6001 unpack_integer = lambda x: compat_struct_unpack(int_map[len(x)], x)[0]
6002
6003 chunks = []
6004
6005 while header:
6006 length = unpack_integer(header[:4])
6007 header = header[4:]
6008
6009 chunk_type = header[:4]
6010 header = header[4:]
6011
6012 chunk_data = header[:length]
6013 header = header[length:]
6014
6015 header = header[4:] # Skip CRC
6016
6017 chunks.append({
6018 'type': chunk_type,
6019 'length': length,
6020 'data': chunk_data
6021 })
6022
6023 ihdr = chunks[0]['data']
6024
6025 width = unpack_integer(ihdr[:4])
6026 height = unpack_integer(ihdr[4:8])
6027
6028 idat = b''
6029
6030 for chunk in chunks:
6031 if chunk['type'] == b'IDAT':
6032 idat += chunk['data']
6033
6034 if not idat:
6035 raise IOError('Unable to read PNG data.')
6036
6037 decompressed_data = bytearray(zlib.decompress(idat))
6038
6039 stride = width * 3
6040 pixels = []
6041
6042 def _get_pixel(idx):
6043 x = idx % stride
6044 y = idx // stride
6045 return pixels[y][x]
6046
6047 for y in range(height):
6048 basePos = y * (1 + stride)
6049 filter_type = decompressed_data[basePos]
6050
6051 current_row = []
6052
6053 pixels.append(current_row)
6054
6055 for x in range(stride):
6056 color = decompressed_data[1 + basePos + x]
6057 basex = y * stride + x
6058 left = 0
6059 up = 0
6060
6061 if x > 2:
6062 left = _get_pixel(basex - 3)
6063 if y > 0:
6064 up = _get_pixel(basex - stride)
6065
6066 if filter_type == 1: # Sub
6067 color = (color + left) & 0xff
6068 elif filter_type == 2: # Up
6069 color = (color + up) & 0xff
6070 elif filter_type == 3: # Average
6071 color = (color + ((left + up) >> 1)) & 0xff
6072 elif filter_type == 4: # Paeth
6073 a = left
6074 b = up
6075 c = 0
6076
6077 if x > 2 and y > 0:
6078 c = _get_pixel(basex - stride - 3)
6079
6080 p = a + b - c
6081
6082 pa = abs(p - a)
6083 pb = abs(p - b)
6084 pc = abs(p - c)
6085
6086 if pa <= pb and pa <= pc:
6087 color = (color + a) & 0xff
6088 elif pb <= pc:
6089 color = (color + b) & 0xff
6090 else:
6091 color = (color + c) & 0xff
6092
6093 current_row.append(color)
6094
6095 return width, height, pixels
6096
6097
6098def write_xattr(path, key, value):
6099 # This mess below finds the best xattr tool for the job
6100 try:
6101 # try the pyxattr module...
6102 import xattr
6103
6104 if hasattr(xattr, 'set'): # pyxattr
6105 # Unicode arguments are not supported in python-pyxattr until
6106 # version 0.5.0
067aa17e 6107 # See https://github.com/ytdl-org/youtube-dl/issues/5498
6108 pyxattr_required_version = '0.5.0'
6109 if version_tuple(xattr.__version__) < version_tuple(pyxattr_required_version):
6110 # TODO: fallback to CLI tools
6111 raise XAttrUnavailableError(
6112 'python-pyxattr is detected but is too old. '
7a5c1cfe 6113 'yt-dlp requires %s or above while your version is %s. '
6114 'Falling back to other xattr implementations' % (
6115 pyxattr_required_version, xattr.__version__))
6116
6117 setxattr = xattr.set
6118 else: # xattr
6119 setxattr = xattr.setxattr
6120
6121 try:
53a7e3d2 6122 setxattr(path, key, value)
6123 except EnvironmentError as e:
6124 raise XAttrMetadataError(e.errno, e.strerror)
6125
6126 except ImportError:
6127 if compat_os_name == 'nt':
6128 # Write xattrs to NTFS Alternate Data Streams:
6129 # http://en.wikipedia.org/wiki/NTFS#Alternate_data_streams_.28ADS.29
6130 assert ':' not in key
6131 assert os.path.exists(path)
6132
6133 ads_fn = path + ':' + key
6134 try:
6135 with open(ads_fn, 'wb') as f:
6136 f.write(value)
6137 except EnvironmentError as e:
6138 raise XAttrMetadataError(e.errno, e.strerror)
6139 else:
6140 user_has_setfattr = check_executable('setfattr', ['--version'])
6141 user_has_xattr = check_executable('xattr', ['-h'])
6142
6143 if user_has_setfattr or user_has_xattr:
6144
6145 value = value.decode('utf-8')
6146 if user_has_setfattr:
6147 executable = 'setfattr'
6148 opts = ['-n', key, '-v', value]
6149 elif user_has_xattr:
6150 executable = 'xattr'
6151 opts = ['-w', key, value]
6152
6153 cmd = ([encodeFilename(executable, True)]
6154 + [encodeArgument(o) for o in opts]
6155 + [encodeFilename(path, True)])
6156
6157 try:
6158 p = subprocess.Popen(
6159 cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE, stdin=subprocess.PIPE)
6160 except EnvironmentError as e:
6161 raise XAttrMetadataError(e.errno, e.strerror)
f5b1bca9 6162 stdout, stderr = process_communicate_or_kill(p)
6163 stderr = stderr.decode('utf-8', 'replace')
6164 if p.returncode != 0:
6165 raise XAttrMetadataError(p.returncode, stderr)
6166
6167 else:
6168 # On Unix, but we could not find pyxattr, setfattr, or xattr.
6169 if sys.platform.startswith('linux'):
6170 raise XAttrUnavailableError(
6171 "Couldn't find a tool to set the xattrs. "
6172 "Install either the python 'pyxattr' or 'xattr' "
6173 "modules, or the GNU 'attr' package "
6174 "(which contains the 'setfattr' tool).")
6175 else:
6176 raise XAttrUnavailableError(
6177 "Couldn't find a tool to set the xattrs. "
6178 "Install either the python 'xattr' module, "
6179 "or the 'xattr' binary.")
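# A minimal usage sketch (illustrative file name and key): the value must be a
# bytes object, and keys containing ':' are rejected on the Windows/ADS code path.
def _example_write_xattr():
    write_xattr('video.mp4', 'user.xdg.referrer.url', 'https://example.com/'.encode('utf-8'))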
6180
6181
6182def random_birthday(year_field, month_field, day_field):
6183 start_date = datetime.date(1950, 1, 1)
6184 end_date = datetime.date(1995, 12, 31)
6185 offset = random.randint(0, (end_date - start_date).days)
6186 random_date = start_date + datetime.timedelta(offset)
0c265486 6187 return {
6188 year_field: str(random_date.year),
6189 month_field: str(random_date.month),
6190 day_field: str(random_date.day),
0c265486 6191 }
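# A minimal sketch: the three arguments are caller-chosen dict keys; the values
# are stringified numbers without zero padding, e.g. {'birth_month': '6'}.
def _example_random_birthday():
    fields = random_birthday('birth_year', 'birth_month', 'birth_day')
    assert set(fields) == {'birth_year', 'birth_month', 'birth_day'}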
732044af 6192
c76eb41b 6193
732044af 6194# Templates for internet shortcut files, which are plain text files.
6195DOT_URL_LINK_TEMPLATE = '''
6196[InternetShortcut]
6197URL=%(url)s
6198'''.lstrip()
6199
6200DOT_WEBLOC_LINK_TEMPLATE = '''
6201<?xml version="1.0" encoding="UTF-8"?>
6202<!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
6203<plist version="1.0">
6204<dict>
6205\t<key>URL</key>
6206\t<string>%(url)s</string>
6207</dict>
6208</plist>
6209'''.lstrip()
6210
6211DOT_DESKTOP_LINK_TEMPLATE = '''
6212[Desktop Entry]
6213Encoding=UTF-8
6214Name=%(filename)s
6215Type=Link
6216URL=%(url)s
6217Icon=text-html
6218'''.lstrip()
6219
6220
6221def iri_to_uri(iri):
6222 """
6223 Converts an IRI (Internationalized Resource Identifier, allowing Unicode characters) to a URI (Uniform Resource Identifier, ASCII-only).
6224
6225 The function doesn't add an additional layer of escaping; e.g., it doesn't escape `%3C` as `%253C`. Instead, it percent-escapes characters with an underlying UTF-8 encoding *besides* those already escaped, leaving the URI intact.
6226 """
6227
6228 iri_parts = compat_urllib_parse_urlparse(iri)
6229
6230 if '[' in iri_parts.netloc:
6231 raise ValueError('IPv6 URIs are not yet supported.')
6232 # Querying `.netloc` also raises a ValueError when the netloc contains only one (unmatched) bracket.
6233
6234 # The `safe` argument values, that the following code uses, contain the characters that should not be percent-encoded. Everything else but letters, digits and '_.-' will be percent-encoded with an underlying UTF-8 encoding. Everything already percent-encoded will be left as is.
6235
6236 net_location = ''
6237 if iri_parts.username:
6238 net_location += compat_urllib_parse_quote(iri_parts.username, safe=r"!$%&'()*+,~")
6239 if iri_parts.password is not None:
6240 net_location += ':' + compat_urllib_parse_quote(iri_parts.password, safe=r"!$%&'()*+,~")
6241 net_location += '@'
6242
6243 net_location += iri_parts.hostname.encode('idna').decode('utf-8') # Punycode for Unicode hostnames.
6244 # The 'idna' encoding produces ASCII text.
6245 if iri_parts.port is not None and iri_parts.port != 80:
6246 net_location += ':' + str(iri_parts.port)
6247
6248 return compat_urllib_parse_urlunparse(
6249 (iri_parts.scheme,
6250 net_location,
6251
6252 compat_urllib_parse_quote_plus(iri_parts.path, safe=r"!$%&'()*+,/:;=@|~"),
6253
6254 # Unsure about the `safe` argument, since this is a legacy way of handling parameters.
6255 compat_urllib_parse_quote_plus(iri_parts.params, safe=r"!$%&'()*+,/:;=@|~"),
6256
6257 # Not totally sure about the `safe` argument, since the source does not explicitly mention the query URI component.
6258 compat_urllib_parse_quote_plus(iri_parts.query, safe=r"!$%&'()*+,/:;=?@{|}~"),
6259
6260 compat_urllib_parse_quote_plus(iri_parts.fragment, safe=r"!#$%&'()*+,/:;=?@{|}~")))
6261
6262 # Source for `safe` arguments: https://url.spec.whatwg.org/#percent-encoded-bytes.
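# A minimal sketch: non-ASCII path/query characters are percent-encoded as UTF-8,
# non-ASCII hostnames become punycode, and already-escaped sequences are left alone.
def _example_iri_to_uri():
    assert iri_to_uri('http://example.com/p\u00e5th?a=\u00e4') == 'http://example.com/p%C3%A5th?a=%C3%A4'
    assert iri_to_uri('http://example.com/%3Cb%3E') == 'http://example.com/%3Cb%3E'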
6263
6264
6265def to_high_limit_path(path):
6266 if sys.platform in ['win32', 'cygwin']:
6267 # Work around MAX_PATH limitation on Windows. The maximum allowed length for the individual path segments may still be quite limited.
6268 return r'\\?\ '.rstrip() + os.path.abspath(path)
6269
6270 return path
76d321f6 6271
c76eb41b 6272
b868936c 6273def format_field(obj, field=None, template='%s', ignore=(None, ''), default='', func=None):
6274 if field is None:
6275 val = obj if obj is not None else default
6276 else:
6277 val = obj.get(field, default)
76d321f6 6278 if func and val not in ignore:
6279 val = func(val)
6280 return template % val if val not in ignore else default
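# A minimal sketch: values matching `ignore` (None or '' by default) collapse to
# `default`; everything else is passed through `func` (if given) and interpolated
# into `template`.
def _example_format_field():
    info = {'view_count': 120}
    assert format_field(info, 'view_count', '%s views') == '120 views'
    assert format_field(info, 'like_count', '%s likes') == ''
    assert format_field(1024, template='%d B', func=lambda x: x // 2) == '512 B'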
00dd0cd5 6281
6282
6283def clean_podcast_url(url):
6284 return re.sub(r'''(?x)
6285 (?:
6286 (?:
6287 chtbl\.com/track|
6288 media\.blubrry\.com| # https://create.blubrry.com/resources/podcast-media-download-statistics/getting-started/
6289 play\.podtrac\.com
6290 )/[^/]+|
6291 (?:dts|www)\.podtrac\.com/(?:pts/)?redirect\.[0-9a-z]{3,4}| # http://analytics.podtrac.com/how-to-measure
6292 flex\.acast\.com|
6293 pd(?:
6294 cn\.co| # https://podcorn.com/analytics-prefix/
6295 st\.fm # https://podsights.com/docs/
6296 )/e
6297 )/''', '', url)
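# A minimal sketch with a made-up URL: every recognised tracking/measurement
# prefix is stripped, leaving the direct media URL.
def _example_clean_podcast_url():
    url = 'https://dts.podtrac.com/redirect.mp3/chtbl.com/track/12345/example.com/episode.mp3'
    assert clean_podcast_url(url) == 'https://example.com/episode.mp3'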
6298
6299
6300_HEX_TABLE = '0123456789abcdef'
6301
6302
6303def random_uuidv4():
6304 return re.sub(r'[xy]', lambda x: _HEX_TABLE[random.randint(0, 15)], 'xxxxxxxx-xxxx-4xxx-yxxx-xxxxxxxxxxxx')
0202b52a 6305
6306
6307def make_dir(path, to_screen=None):
6308 try:
6309 dn = os.path.dirname(path)
6310 if dn and not os.path.exists(dn):
6311 os.makedirs(dn)
6312 return True
6313 except (OSError, IOError) as err:
6314 if callable(to_screen):
6315 to_screen('unable to create directory ' + error_to_compat_str(err))
6316 return False
f74980cb 6317
6318
6319def get_executable_path():
c552ae88 6320 from zipimport import zipimporter
6321 if hasattr(sys, 'frozen'): # Running from PyInstaller
6322 path = os.path.dirname(sys.executable)
6323 elif isinstance(globals().get('__loader__'), zipimporter): # Running from ZIP
6324 path = os.path.join(os.path.dirname(__file__), '../..')
6325 else:
6326 path = os.path.join(os.path.dirname(__file__), '..')
f74980cb 6327 return os.path.abspath(path)
6328
6329
2f567473 6330def load_plugins(name, suffix, namespace):
3ae5e797 6331 classes = {}
f74980cb 6332 try:
6333 plugins_spec = importlib.util.spec_from_file_location(
6334 name, os.path.join(get_executable_path(), 'ytdlp_plugins', name, '__init__.py'))
6335 plugins = importlib.util.module_from_spec(plugins_spec)
6336 sys.modules[plugins_spec.name] = plugins
6337 plugins_spec.loader.exec_module(plugins)
f74980cb 6338 for name in dir(plugins):
2f567473 6339 if name in namespace:
6340 continue
6341 if not name.endswith(suffix):
f74980cb 6342 continue
6343 klass = getattr(plugins, name)
3ae5e797 6344 classes[name] = namespace[name] = klass
019a94f7 6345 except FileNotFoundError:
f74980cb 6346 pass
f74980cb 6347 return classes
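# A minimal usage sketch (mirroring how extractor plugins are expected to be
# loaded): classes whose names end in 'IE' are picked up from
# ytdlp_plugins/extractor/__init__.py next to the executable, if it exists.
def _example_load_plugins():
    return load_plugins('extractor', 'IE', globals())  # {} when no plugin package is present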
06167fbb 6348
6349
325ebc17 6350def traverse_obj(
352d63fd 6351 obj, *path_list, default=None, expected_type=None, get_all=True,
325ebc17 6352 casesense=True, is_user_input=False, traverse_string=False):
324ad820 6353 ''' Traverse nested list/dict/tuple
8f334380 6354 @param path_list A list of paths which are checked one by one.
6355 Each path is a list of keys where each key is a string,
2614f646 6356 a function, a tuple of strings or "...".
6357 When a fuction is given, it takes the key as argument and
6358 returns whether the key matches or not. When a tuple is given,
8f334380 6359 all the keys given in the tuple are traversed, and
6360 "..." traverses all the keys in the object
325ebc17 6361 @param default Default value to return
352d63fd 6362 @param expected_type Only accept final value of this type (Can also be any callable)
6363 @param get_all Return all the values obtained from a path or only the first one
324ad820 6364 @param casesense Whether to consider dictionary keys as case sensitive
6365 @param is_user_input Whether the keys are generated from user input. If True,
6366 strings are converted to int/slice if necessary
6367 @param traverse_string Whether to traverse inside strings. If True, any
6368 non-compatible object will also be converted into a string
8f334380 6369 # TODO: Write tests
324ad820 6370 '''
325ebc17 6371 if not casesense:
dbf5416a 6372 _lower = lambda k: (k.lower() if isinstance(k, str) else k)
8f334380 6373 path_list = (map(_lower, variadic(path)) for path in path_list)
6374
6375 def _traverse_obj(obj, path, _current_depth=0):
6376 nonlocal depth
575e17a1 6377 if obj is None:
6378 return None
8f334380 6379 path = tuple(variadic(path))
6380 for i, key in enumerate(path):
6381 if isinstance(key, (list, tuple)):
6382 obj = [_traverse_obj(obj, sub_key, _current_depth) for sub_key in key]
6383 key = ...
6384 if key is ...:
6385 obj = (obj.values() if isinstance(obj, dict)
6386 else obj if isinstance(obj, (list, tuple, LazyList))
6387 else str(obj) if traverse_string else [])
6388 _current_depth += 1
6389 depth = max(depth, _current_depth)
6390 return [_traverse_obj(inner_obj, path[i + 1:], _current_depth) for inner_obj in obj]
2614f646 6391 elif callable(key):
6392 if isinstance(obj, (list, tuple, LazyList)):
6393 obj = enumerate(obj)
6394 elif isinstance(obj, dict):
6395 obj = obj.items()
6396 else:
6397 if not traverse_string:
6398 return None
6399 obj = str(obj)
6400 _current_depth += 1
6401 depth = max(depth, _current_depth)
6402 return [_traverse_obj(v, path[i + 1:], _current_depth) for k, v in obj if key(k)]
575e17a1 6403 elif isinstance(obj, dict) and not (is_user_input and key == ':'):
325ebc17 6404 obj = (obj.get(key) if casesense or (key in obj)
6405 else next((v for k, v in obj.items() if _lower(k) == key), None))
6406 else:
6407 if is_user_input:
6408 key = (int_or_none(key) if ':' not in key
6409 else slice(*map(int_or_none, key.split(':'))))
8f334380 6410 if key == slice(None):
575e17a1 6411 return _traverse_obj(obj, (..., *path[i + 1:]), _current_depth)
325ebc17 6412 if not isinstance(key, (int, slice)):
9fea350f 6413 return None
8f334380 6414 if not isinstance(obj, (list, tuple, LazyList)):
325ebc17 6415 if not traverse_string:
6416 return None
6417 obj = str(obj)
6418 try:
6419 obj = obj[key]
6420 except IndexError:
324ad820 6421 return None
325ebc17 6422 return obj
6423
352d63fd 6424 if isinstance(expected_type, type):
6425 type_test = lambda val: val if isinstance(val, expected_type) else None
6426 elif expected_type is not None:
6427 type_test = expected_type
6428 else:
6429 type_test = lambda val: val
6430
8f334380 6431 for path in path_list:
6432 depth = 0
6433 val = _traverse_obj(obj, path)
325ebc17 6434 if val is not None:
8f334380 6435 if depth:
6436 for _ in range(depth - 1):
6586bca9 6437 val = itertools.chain.from_iterable(v for v in val if v is not None)
352d63fd 6438 val = [v for v in map(type_test, val) if v is not None]
8f334380 6439 if val:
352d63fd 6440 return val if get_all else val[0]
6441 else:
6442 val = type_test(val)
6443 if val is not None:
8f334380 6444 return val
325ebc17 6445 return default
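# A minimal sketch in lieu of the missing tests: nested dicts/lists are walked key
# by key, `...` fans out over every element, and alternative paths are tried in
# order until one yields a value.
def _example_traverse_obj():
    data = {'playlist': {'entries': [{'id': 'a', 'title': 'A'}, {'id': 'b'}]}}
    assert traverse_obj(data, ('playlist', 'entries', 0, 'title')) == 'A'
    assert traverse_obj(data, ('playlist', 'entries', ..., 'id')) == ['a', 'b']
    assert traverse_obj(data, ('playlist', 'count'), ('playlist', 'entries', 1, 'id')) == 'b'
    assert traverse_obj(data, ('playlist', 'entries', ..., 'title'), expected_type=str) == ['A']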
324ad820 6446
6447
6448def traverse_dict(dictn, keys, casesense=True):
6449 ''' For backward compatibility. Do not use '''
6450 return traverse_obj(dictn, keys, casesense=casesense,
6451 is_user_input=True, traverse_string=True)
6606817a 6452
6453
c634ad2a 6454def variadic(x, allowed_types=(str, bytes)):
cb89cfc1 6455 return x if isinstance(x, collections.abc.Iterable) and not isinstance(x, allowed_types) else (x,)
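# A minimal sketch: scalars (including str and bytes) are wrapped in a 1-tuple,
# other iterables pass through unchanged.
def _example_variadic():
    assert variadic('spam') == ('spam',)
    assert variadic(None) == (None,)
    assert variadic(['spam', 'eggs']) == ['spam', 'eggs']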
6456
6457
6458# create a JSON Web Signature (jws) with HS256 algorithm
6459# the resulting format is in JWS Compact Serialization
6460# implemented following JWT https://www.rfc-editor.org/rfc/rfc7519.html
6461# implemented following JWS https://www.rfc-editor.org/rfc/rfc7515.html
6462def jwt_encode_hs256(payload_data, key, headers={}):
6463 header_data = {
6464 'alg': 'HS256',
6465 'typ': 'JWT',
6466 }
6467 if headers:
6468 header_data.update(headers)
6469 header_b64 = base64.b64encode(json.dumps(header_data).encode('utf-8'))
6470 payload_b64 = base64.b64encode(json.dumps(payload_data).encode('utf-8'))
6471 h = hmac.new(key.encode('utf-8'), header_b64 + b'.' + payload_b64, hashlib.sha256)
6472 signature_b64 = base64.b64encode(h.digest())
6473 token = header_b64 + b'.' + payload_b64 + b'.' + signature_b64
6474 return token
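# A minimal sketch: the token comes back as bytes in header.payload.signature form.
# Note that plain (not URL-safe) base64 is used here, so strict JWT consumers may
# reject some tokens.
def _example_jwt_encode_hs256():
    token = jwt_encode_hs256({'sub': 'example'}, 'secret-key', headers={'kid': 'k1'})
    assert isinstance(token, bytes) and token.count(b'.') == 2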
819e0531 6475
6476
6477def supports_terminal_sequences(stream):
6478 if compat_os_name == 'nt':
d1d5c08f 6479 if get_windows_version() < (10, 0, 10586):
819e0531 6480 return False
6481 elif not os.getenv('TERM'):
6482 return False
6483 try:
6484 return stream.isatty()
6485 except BaseException:
6486 return False
6487
6488
6489TERMINAL_SEQUENCES = {
6490 'DOWN': '\n',
6491 'UP': '\x1b[A',
6492 'ERASE_LINE': '\x1b[K',
6493 'RED': '\033[0;31m',
6494 'YELLOW': '\033[0;33m',
6495 'BLUE': '\033[0;34m',
6496 'RESET_STYLE': '\033[0m',
6497}
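# A minimal sketch: gate escape-sequence output on supports_terminal_sequences()
# so dumb terminals and old Windows consoles still get plain text.
def _example_colored_error(message):
    if supports_terminal_sequences(sys.stderr):
        message = TERMINAL_SEQUENCES['RED'] + message + TERMINAL_SEQUENCES['RESET_STYLE']
    sys.stderr.write(message + '\n')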