yt_dlp/utils.py
cc52de43 1#!/usr/bin/env python3
dcdb292f 2# coding: utf-8
d77c3dfd 3
4from __future__ import unicode_literals
5
1e399778 6import base64
5bc880b9 7import binascii
912b38b4 8import calendar
676eb3f2 9import codecs
c380cc28 10import collections
62e609ab 11import contextlib
e3946f98 12import ctypes
13import datetime
14import email.utils
0c265486 15import email.header
f45c185f 16import errno
be4a824d 17import functools
d77c3dfd 18import gzip
19import hashlib
20import hmac
019a94f7 21import importlib.util
03f9daab 22import io
79a2e94e 23import itertools
f4bfd65f 24import json
d77c3dfd 25import locale
02dbf93f 26import math
347de493 27import operator
d77c3dfd 28import os
c496ca96 29import platform
773f291d 30import random
d77c3dfd 31import re
c496ca96 32import socket
79a2e94e 33import ssl
1c088fa8 34import subprocess
d77c3dfd 35import sys
181c8655 36import tempfile
c380cc28 37import time
01951dda 38import traceback
bcf89ce6 39import xml.etree.ElementTree
d77c3dfd 40import zlib
2814f12b 41import mimetypes
d77c3dfd 42
8c25f81b 43from .compat import (
b4a3d461 44 compat_HTMLParseError,
8bb56eee 45 compat_HTMLParser,
201c1459 46 compat_HTTPError,
8f9312c3 47 compat_basestring,
8c25f81b 48 compat_chr,
1bab3437 49 compat_cookiejar,
d7cd9a9e 50 compat_ctypes_WINFUNCTYPE,
36e6f62c 51 compat_etree_fromstring,
51098426 52 compat_expanduser,
8c25f81b 53 compat_html_entities,
55b2f099 54 compat_html_entities_html5,
be4a824d 55 compat_http_client,
42db58ec 56 compat_integer_types,
e29663c6 57 compat_numeric_types,
c86b6142 58 compat_kwargs,
efa97bdc 59 compat_os_name,
8c25f81b 60 compat_parse_qs,
702ccf2d 61 compat_shlex_quote,
8c25f81b 62 compat_str,
edaa23f8 63 compat_struct_pack,
d3f8e038 64 compat_struct_unpack,
65 compat_urllib_error,
66 compat_urllib_parse,
15707c7e 67 compat_urllib_parse_urlencode,
8c25f81b 68 compat_urllib_parse_urlparse,
732044af 69 compat_urllib_parse_urlunparse,
70 compat_urllib_parse_quote,
71 compat_urllib_parse_quote_plus,
7581bfc9 72 compat_urllib_parse_unquote_plus,
73 compat_urllib_request,
74 compat_urlparse,
810c10ba 75 compat_xpath,
8c25f81b 76)
4644ac55 77
78from .socks import (
79 ProxyType,
80 sockssocket,
81)
82
4644ac55 83
84def register_socks_protocols():
85 # "Register" SOCKS protocols
86 # In Python < 2.6.5, urlsplit() suffers from bug https://bugs.python.org/issue7904
87 # URLs with protocols not in urlparse.uses_netloc are not handled correctly
88 for scheme in ('socks', 'socks4', 'socks4a', 'socks5'):
89 if scheme not in compat_urlparse.uses_netloc:
90 compat_urlparse.uses_netloc.append(scheme)
91
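# --- Illustrative sketch (not part of the original module) ---------------------
# Why the registration above matters: on old Python versions (see the bug link in
# the comment), urlsplit() only handles '//netloc' correctly for schemes it knows
# about, so an unregistered 'socks5://host:1080' proxy URL would lose its host/port.
# The proxy address below is made up.
def _demo_register_socks_protocols():
    register_socks_protocols()
    parts = compat_urlparse.urlparse('socks5://127.0.0.1:1080')
    return parts.hostname, parts.port  # expected: ('127.0.0.1', 1080)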
92
93# The compiled-regex type is not clearly defined anywhere else, so capture it here
94compiled_regex_type = type(re.compile(''))
95
96
97def random_user_agent():
98 _USER_AGENT_TPL = 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/%s Safari/537.36'
99 _CHROME_VERSIONS = (
100 '74.0.3729.129',
101 '76.0.3780.3',
102 '76.0.3780.2',
103 '74.0.3729.128',
104 '76.0.3780.1',
105 '76.0.3780.0',
106 '75.0.3770.15',
107 '74.0.3729.127',
108 '74.0.3729.126',
109 '76.0.3779.1',
110 '76.0.3779.0',
111 '75.0.3770.14',
112 '74.0.3729.125',
113 '76.0.3778.1',
114 '76.0.3778.0',
115 '75.0.3770.13',
116 '74.0.3729.124',
117 '74.0.3729.123',
118 '73.0.3683.121',
119 '76.0.3777.1',
120 '76.0.3777.0',
121 '75.0.3770.12',
122 '74.0.3729.122',
123 '76.0.3776.4',
124 '75.0.3770.11',
125 '74.0.3729.121',
126 '76.0.3776.3',
127 '76.0.3776.2',
128 '73.0.3683.120',
129 '74.0.3729.120',
130 '74.0.3729.119',
131 '74.0.3729.118',
132 '76.0.3776.1',
133 '76.0.3776.0',
134 '76.0.3775.5',
135 '75.0.3770.10',
136 '74.0.3729.117',
137 '76.0.3775.4',
138 '76.0.3775.3',
139 '74.0.3729.116',
140 '75.0.3770.9',
141 '76.0.3775.2',
142 '76.0.3775.1',
143 '76.0.3775.0',
144 '75.0.3770.8',
145 '74.0.3729.115',
146 '74.0.3729.114',
147 '76.0.3774.1',
148 '76.0.3774.0',
149 '75.0.3770.7',
150 '74.0.3729.113',
151 '74.0.3729.112',
152 '74.0.3729.111',
153 '76.0.3773.1',
154 '76.0.3773.0',
155 '75.0.3770.6',
156 '74.0.3729.110',
157 '74.0.3729.109',
158 '76.0.3772.1',
159 '76.0.3772.0',
160 '75.0.3770.5',
161 '74.0.3729.108',
162 '74.0.3729.107',
163 '76.0.3771.1',
164 '76.0.3771.0',
165 '75.0.3770.4',
166 '74.0.3729.106',
167 '74.0.3729.105',
168 '75.0.3770.3',
169 '74.0.3729.104',
170 '74.0.3729.103',
171 '74.0.3729.102',
172 '75.0.3770.2',
173 '74.0.3729.101',
174 '75.0.3770.1',
175 '75.0.3770.0',
176 '74.0.3729.100',
177 '75.0.3769.5',
178 '75.0.3769.4',
179 '74.0.3729.99',
180 '75.0.3769.3',
181 '75.0.3769.2',
182 '75.0.3768.6',
183 '74.0.3729.98',
184 '75.0.3769.1',
185 '75.0.3769.0',
186 '74.0.3729.97',
187 '73.0.3683.119',
188 '73.0.3683.118',
189 '74.0.3729.96',
190 '75.0.3768.5',
191 '75.0.3768.4',
192 '75.0.3768.3',
193 '75.0.3768.2',
194 '74.0.3729.95',
195 '74.0.3729.94',
196 '75.0.3768.1',
197 '75.0.3768.0',
198 '74.0.3729.93',
199 '74.0.3729.92',
200 '73.0.3683.117',
201 '74.0.3729.91',
202 '75.0.3766.3',
203 '74.0.3729.90',
204 '75.0.3767.2',
205 '75.0.3767.1',
206 '75.0.3767.0',
207 '74.0.3729.89',
208 '73.0.3683.116',
209 '75.0.3766.2',
210 '74.0.3729.88',
211 '75.0.3766.1',
212 '75.0.3766.0',
213 '74.0.3729.87',
214 '73.0.3683.115',
215 '74.0.3729.86',
216 '75.0.3765.1',
217 '75.0.3765.0',
218 '74.0.3729.85',
219 '73.0.3683.114',
220 '74.0.3729.84',
221 '75.0.3764.1',
222 '75.0.3764.0',
223 '74.0.3729.83',
224 '73.0.3683.113',
225 '75.0.3763.2',
226 '75.0.3761.4',
227 '74.0.3729.82',
228 '75.0.3763.1',
229 '75.0.3763.0',
230 '74.0.3729.81',
231 '73.0.3683.112',
232 '75.0.3762.1',
233 '75.0.3762.0',
234 '74.0.3729.80',
235 '75.0.3761.3',
236 '74.0.3729.79',
237 '73.0.3683.111',
238 '75.0.3761.2',
239 '74.0.3729.78',
240 '74.0.3729.77',
241 '75.0.3761.1',
242 '75.0.3761.0',
243 '73.0.3683.110',
244 '74.0.3729.76',
245 '74.0.3729.75',
246 '75.0.3760.0',
247 '74.0.3729.74',
248 '75.0.3759.8',
249 '75.0.3759.7',
250 '75.0.3759.6',
251 '74.0.3729.73',
252 '75.0.3759.5',
253 '74.0.3729.72',
254 '73.0.3683.109',
255 '75.0.3759.4',
256 '75.0.3759.3',
257 '74.0.3729.71',
258 '75.0.3759.2',
259 '74.0.3729.70',
260 '73.0.3683.108',
261 '74.0.3729.69',
262 '75.0.3759.1',
263 '75.0.3759.0',
264 '74.0.3729.68',
265 '73.0.3683.107',
266 '74.0.3729.67',
267 '75.0.3758.1',
268 '75.0.3758.0',
269 '74.0.3729.66',
270 '73.0.3683.106',
271 '74.0.3729.65',
272 '75.0.3757.1',
273 '75.0.3757.0',
274 '74.0.3729.64',
275 '73.0.3683.105',
276 '74.0.3729.63',
277 '75.0.3756.1',
278 '75.0.3756.0',
279 '74.0.3729.62',
280 '73.0.3683.104',
281 '75.0.3755.3',
282 '75.0.3755.2',
283 '73.0.3683.103',
284 '75.0.3755.1',
285 '75.0.3755.0',
286 '74.0.3729.61',
287 '73.0.3683.102',
288 '74.0.3729.60',
289 '75.0.3754.2',
290 '74.0.3729.59',
291 '75.0.3753.4',
292 '74.0.3729.58',
293 '75.0.3754.1',
294 '75.0.3754.0',
295 '74.0.3729.57',
296 '73.0.3683.101',
297 '75.0.3753.3',
298 '75.0.3752.2',
299 '75.0.3753.2',
300 '74.0.3729.56',
301 '75.0.3753.1',
302 '75.0.3753.0',
303 '74.0.3729.55',
304 '73.0.3683.100',
305 '74.0.3729.54',
306 '75.0.3752.1',
307 '75.0.3752.0',
308 '74.0.3729.53',
309 '73.0.3683.99',
310 '74.0.3729.52',
311 '75.0.3751.1',
312 '75.0.3751.0',
313 '74.0.3729.51',
314 '73.0.3683.98',
315 '74.0.3729.50',
316 '75.0.3750.0',
317 '74.0.3729.49',
318 '74.0.3729.48',
319 '74.0.3729.47',
320 '75.0.3749.3',
321 '74.0.3729.46',
322 '73.0.3683.97',
323 '75.0.3749.2',
324 '74.0.3729.45',
325 '75.0.3749.1',
326 '75.0.3749.0',
327 '74.0.3729.44',
328 '73.0.3683.96',
329 '74.0.3729.43',
330 '74.0.3729.42',
331 '75.0.3748.1',
332 '75.0.3748.0',
333 '74.0.3729.41',
334 '75.0.3747.1',
335 '73.0.3683.95',
336 '75.0.3746.4',
337 '74.0.3729.40',
338 '74.0.3729.39',
339 '75.0.3747.0',
340 '75.0.3746.3',
341 '75.0.3746.2',
342 '74.0.3729.38',
343 '75.0.3746.1',
344 '75.0.3746.0',
345 '74.0.3729.37',
346 '73.0.3683.94',
347 '75.0.3745.5',
348 '75.0.3745.4',
349 '75.0.3745.3',
350 '75.0.3745.2',
351 '74.0.3729.36',
352 '75.0.3745.1',
353 '75.0.3745.0',
354 '75.0.3744.2',
355 '74.0.3729.35',
356 '73.0.3683.93',
357 '74.0.3729.34',
358 '75.0.3744.1',
359 '75.0.3744.0',
360 '74.0.3729.33',
361 '73.0.3683.92',
362 '74.0.3729.32',
363 '74.0.3729.31',
364 '73.0.3683.91',
365 '75.0.3741.2',
366 '75.0.3740.5',
367 '74.0.3729.30',
368 '75.0.3741.1',
369 '75.0.3741.0',
370 '74.0.3729.29',
371 '75.0.3740.4',
372 '73.0.3683.90',
373 '74.0.3729.28',
374 '75.0.3740.3',
375 '73.0.3683.89',
376 '75.0.3740.2',
377 '74.0.3729.27',
378 '75.0.3740.1',
379 '75.0.3740.0',
380 '74.0.3729.26',
381 '73.0.3683.88',
382 '73.0.3683.87',
383 '74.0.3729.25',
384 '75.0.3739.1',
385 '75.0.3739.0',
386 '73.0.3683.86',
387 '74.0.3729.24',
388 '73.0.3683.85',
389 '75.0.3738.4',
390 '75.0.3738.3',
391 '75.0.3738.2',
392 '75.0.3738.1',
393 '75.0.3738.0',
394 '74.0.3729.23',
395 '73.0.3683.84',
396 '74.0.3729.22',
397 '74.0.3729.21',
398 '75.0.3737.1',
399 '75.0.3737.0',
400 '74.0.3729.20',
401 '73.0.3683.83',
402 '74.0.3729.19',
403 '75.0.3736.1',
404 '75.0.3736.0',
405 '74.0.3729.18',
406 '73.0.3683.82',
407 '74.0.3729.17',
408 '75.0.3735.1',
409 '75.0.3735.0',
410 '74.0.3729.16',
411 '73.0.3683.81',
412 '75.0.3734.1',
413 '75.0.3734.0',
414 '74.0.3729.15',
415 '73.0.3683.80',
416 '74.0.3729.14',
417 '75.0.3733.1',
418 '75.0.3733.0',
419 '75.0.3732.1',
420 '74.0.3729.13',
421 '74.0.3729.12',
422 '73.0.3683.79',
423 '74.0.3729.11',
424 '75.0.3732.0',
425 '74.0.3729.10',
426 '73.0.3683.78',
427 '74.0.3729.9',
428 '74.0.3729.8',
429 '74.0.3729.7',
430 '75.0.3731.3',
431 '75.0.3731.2',
432 '75.0.3731.0',
433 '74.0.3729.6',
434 '73.0.3683.77',
435 '73.0.3683.76',
436 '75.0.3730.5',
437 '75.0.3730.4',
438 '73.0.3683.75',
439 '74.0.3729.5',
440 '73.0.3683.74',
441 '75.0.3730.3',
442 '75.0.3730.2',
443 '74.0.3729.4',
444 '73.0.3683.73',
445 '73.0.3683.72',
446 '75.0.3730.1',
447 '75.0.3730.0',
448 '74.0.3729.3',
449 '73.0.3683.71',
450 '74.0.3729.2',
451 '73.0.3683.70',
452 '74.0.3729.1',
453 '74.0.3729.0',
454 '74.0.3726.4',
455 '73.0.3683.69',
456 '74.0.3726.3',
457 '74.0.3728.0',
458 '74.0.3726.2',
459 '73.0.3683.68',
460 '74.0.3726.1',
461 '74.0.3726.0',
462 '74.0.3725.4',
463 '73.0.3683.67',
464 '73.0.3683.66',
465 '74.0.3725.3',
466 '74.0.3725.2',
467 '74.0.3725.1',
468 '74.0.3724.8',
469 '74.0.3725.0',
470 '73.0.3683.65',
471 '74.0.3724.7',
472 '74.0.3724.6',
473 '74.0.3724.5',
474 '74.0.3724.4',
475 '74.0.3724.3',
476 '74.0.3724.2',
477 '74.0.3724.1',
478 '74.0.3724.0',
479 '73.0.3683.64',
480 '74.0.3723.1',
481 '74.0.3723.0',
482 '73.0.3683.63',
483 '74.0.3722.1',
484 '74.0.3722.0',
485 '73.0.3683.62',
486 '74.0.3718.9',
487 '74.0.3702.3',
488 '74.0.3721.3',
489 '74.0.3721.2',
490 '74.0.3721.1',
491 '74.0.3721.0',
492 '74.0.3720.6',
493 '73.0.3683.61',
494 '72.0.3626.122',
495 '73.0.3683.60',
496 '74.0.3720.5',
497 '72.0.3626.121',
498 '74.0.3718.8',
499 '74.0.3720.4',
500 '74.0.3720.3',
501 '74.0.3718.7',
502 '74.0.3720.2',
503 '74.0.3720.1',
504 '74.0.3720.0',
505 '74.0.3718.6',
506 '74.0.3719.5',
507 '73.0.3683.59',
508 '74.0.3718.5',
509 '74.0.3718.4',
510 '74.0.3719.4',
511 '74.0.3719.3',
512 '74.0.3719.2',
513 '74.0.3719.1',
514 '73.0.3683.58',
515 '74.0.3719.0',
516 '73.0.3683.57',
517 '73.0.3683.56',
518 '74.0.3718.3',
519 '73.0.3683.55',
520 '74.0.3718.2',
521 '74.0.3718.1',
522 '74.0.3718.0',
523 '73.0.3683.54',
524 '74.0.3717.2',
525 '73.0.3683.53',
526 '74.0.3717.1',
527 '74.0.3717.0',
528 '73.0.3683.52',
529 '74.0.3716.1',
530 '74.0.3716.0',
531 '73.0.3683.51',
532 '74.0.3715.1',
533 '74.0.3715.0',
534 '73.0.3683.50',
535 '74.0.3711.2',
536 '74.0.3714.2',
537 '74.0.3713.3',
538 '74.0.3714.1',
539 '74.0.3714.0',
540 '73.0.3683.49',
541 '74.0.3713.1',
542 '74.0.3713.0',
543 '72.0.3626.120',
544 '73.0.3683.48',
545 '74.0.3712.2',
546 '74.0.3712.1',
547 '74.0.3712.0',
548 '73.0.3683.47',
549 '72.0.3626.119',
550 '73.0.3683.46',
551 '74.0.3710.2',
552 '72.0.3626.118',
553 '74.0.3711.1',
554 '74.0.3711.0',
555 '73.0.3683.45',
556 '72.0.3626.117',
557 '74.0.3710.1',
558 '74.0.3710.0',
559 '73.0.3683.44',
560 '72.0.3626.116',
561 '74.0.3709.1',
562 '74.0.3709.0',
563 '74.0.3704.9',
564 '73.0.3683.43',
565 '72.0.3626.115',
566 '74.0.3704.8',
567 '74.0.3704.7',
568 '74.0.3708.0',
569 '74.0.3706.7',
570 '74.0.3704.6',
571 '73.0.3683.42',
572 '72.0.3626.114',
573 '74.0.3706.6',
574 '72.0.3626.113',
575 '74.0.3704.5',
576 '74.0.3706.5',
577 '74.0.3706.4',
578 '74.0.3706.3',
579 '74.0.3706.2',
580 '74.0.3706.1',
581 '74.0.3706.0',
582 '73.0.3683.41',
583 '72.0.3626.112',
584 '74.0.3705.1',
585 '74.0.3705.0',
586 '73.0.3683.40',
587 '72.0.3626.111',
588 '73.0.3683.39',
589 '74.0.3704.4',
590 '73.0.3683.38',
591 '74.0.3704.3',
592 '74.0.3704.2',
593 '74.0.3704.1',
594 '74.0.3704.0',
595 '73.0.3683.37',
596 '72.0.3626.110',
597 '72.0.3626.109',
598 '74.0.3703.3',
599 '74.0.3703.2',
600 '73.0.3683.36',
601 '74.0.3703.1',
602 '74.0.3703.0',
603 '73.0.3683.35',
604 '72.0.3626.108',
605 '74.0.3702.2',
606 '74.0.3699.3',
607 '74.0.3702.1',
608 '74.0.3702.0',
609 '73.0.3683.34',
610 '72.0.3626.107',
611 '73.0.3683.33',
612 '74.0.3701.1',
613 '74.0.3701.0',
614 '73.0.3683.32',
615 '73.0.3683.31',
616 '72.0.3626.105',
617 '74.0.3700.1',
618 '74.0.3700.0',
619 '73.0.3683.29',
620 '72.0.3626.103',
621 '74.0.3699.2',
622 '74.0.3699.1',
623 '74.0.3699.0',
624 '73.0.3683.28',
625 '72.0.3626.102',
626 '73.0.3683.27',
627 '73.0.3683.26',
628 '74.0.3698.0',
629 '74.0.3696.2',
630 '72.0.3626.101',
631 '73.0.3683.25',
632 '74.0.3696.1',
633 '74.0.3696.0',
634 '74.0.3694.8',
635 '72.0.3626.100',
636 '74.0.3694.7',
637 '74.0.3694.6',
638 '74.0.3694.5',
639 '74.0.3694.4',
640 '72.0.3626.99',
641 '72.0.3626.98',
642 '74.0.3694.3',
643 '73.0.3683.24',
644 '72.0.3626.97',
645 '72.0.3626.96',
646 '72.0.3626.95',
647 '73.0.3683.23',
648 '72.0.3626.94',
649 '73.0.3683.22',
650 '73.0.3683.21',
651 '72.0.3626.93',
652 '74.0.3694.2',
653 '72.0.3626.92',
654 '74.0.3694.1',
655 '74.0.3694.0',
656 '74.0.3693.6',
657 '73.0.3683.20',
658 '72.0.3626.91',
659 '74.0.3693.5',
660 '74.0.3693.4',
661 '74.0.3693.3',
662 '74.0.3693.2',
663 '73.0.3683.19',
664 '74.0.3693.1',
665 '74.0.3693.0',
666 '73.0.3683.18',
667 '72.0.3626.90',
668 '74.0.3692.1',
669 '74.0.3692.0',
670 '73.0.3683.17',
671 '72.0.3626.89',
672 '74.0.3687.3',
673 '74.0.3691.1',
674 '74.0.3691.0',
675 '73.0.3683.16',
676 '72.0.3626.88',
677 '72.0.3626.87',
678 '73.0.3683.15',
679 '74.0.3690.1',
680 '74.0.3690.0',
681 '73.0.3683.14',
682 '72.0.3626.86',
683 '73.0.3683.13',
684 '73.0.3683.12',
685 '74.0.3689.1',
686 '74.0.3689.0',
687 '73.0.3683.11',
688 '72.0.3626.85',
689 '73.0.3683.10',
690 '72.0.3626.84',
691 '73.0.3683.9',
692 '74.0.3688.1',
693 '74.0.3688.0',
694 '73.0.3683.8',
695 '72.0.3626.83',
696 '74.0.3687.2',
697 '74.0.3687.1',
698 '74.0.3687.0',
699 '73.0.3683.7',
700 '72.0.3626.82',
701 '74.0.3686.4',
702 '72.0.3626.81',
703 '74.0.3686.3',
704 '74.0.3686.2',
705 '74.0.3686.1',
706 '74.0.3686.0',
707 '73.0.3683.6',
708 '72.0.3626.80',
709 '74.0.3685.1',
710 '74.0.3685.0',
711 '73.0.3683.5',
712 '72.0.3626.79',
713 '74.0.3684.1',
714 '74.0.3684.0',
715 '73.0.3683.4',
716 '72.0.3626.78',
717 '72.0.3626.77',
718 '73.0.3683.3',
719 '73.0.3683.2',
720 '72.0.3626.76',
721 '73.0.3683.1',
722 '73.0.3683.0',
723 '72.0.3626.75',
724 '71.0.3578.141',
725 '73.0.3682.1',
726 '73.0.3682.0',
727 '72.0.3626.74',
728 '71.0.3578.140',
729 '73.0.3681.4',
730 '73.0.3681.3',
731 '73.0.3681.2',
732 '73.0.3681.1',
733 '73.0.3681.0',
734 '72.0.3626.73',
735 '71.0.3578.139',
736 '72.0.3626.72',
737 '72.0.3626.71',
738 '73.0.3680.1',
739 '73.0.3680.0',
740 '72.0.3626.70',
741 '71.0.3578.138',
742 '73.0.3678.2',
743 '73.0.3679.1',
744 '73.0.3679.0',
745 '72.0.3626.69',
746 '71.0.3578.137',
747 '73.0.3678.1',
748 '73.0.3678.0',
749 '71.0.3578.136',
750 '73.0.3677.1',
751 '73.0.3677.0',
752 '72.0.3626.68',
753 '72.0.3626.67',
754 '71.0.3578.135',
755 '73.0.3676.1',
756 '73.0.3676.0',
757 '73.0.3674.2',
758 '72.0.3626.66',
759 '71.0.3578.134',
760 '73.0.3674.1',
761 '73.0.3674.0',
762 '72.0.3626.65',
763 '71.0.3578.133',
764 '73.0.3673.2',
765 '73.0.3673.1',
766 '73.0.3673.0',
767 '72.0.3626.64',
768 '71.0.3578.132',
769 '72.0.3626.63',
770 '72.0.3626.62',
771 '72.0.3626.61',
772 '72.0.3626.60',
773 '73.0.3672.1',
774 '73.0.3672.0',
775 '72.0.3626.59',
776 '71.0.3578.131',
777 '73.0.3671.3',
778 '73.0.3671.2',
779 '73.0.3671.1',
780 '73.0.3671.0',
781 '72.0.3626.58',
782 '71.0.3578.130',
783 '73.0.3670.1',
784 '73.0.3670.0',
785 '72.0.3626.57',
786 '71.0.3578.129',
787 '73.0.3669.1',
788 '73.0.3669.0',
789 '72.0.3626.56',
790 '71.0.3578.128',
791 '73.0.3668.2',
792 '73.0.3668.1',
793 '73.0.3668.0',
794 '72.0.3626.55',
795 '71.0.3578.127',
796 '73.0.3667.2',
797 '73.0.3667.1',
798 '73.0.3667.0',
799 '72.0.3626.54',
800 '71.0.3578.126',
801 '73.0.3666.1',
802 '73.0.3666.0',
803 '72.0.3626.53',
804 '71.0.3578.125',
805 '73.0.3665.4',
806 '73.0.3665.3',
807 '72.0.3626.52',
808 '73.0.3665.2',
809 '73.0.3664.4',
810 '73.0.3665.1',
811 '73.0.3665.0',
812 '72.0.3626.51',
813 '71.0.3578.124',
814 '72.0.3626.50',
815 '73.0.3664.3',
816 '73.0.3664.2',
817 '73.0.3664.1',
818 '73.0.3664.0',
819 '73.0.3663.2',
820 '72.0.3626.49',
821 '71.0.3578.123',
822 '73.0.3663.1',
823 '73.0.3663.0',
824 '72.0.3626.48',
825 '71.0.3578.122',
826 '73.0.3662.1',
827 '73.0.3662.0',
828 '72.0.3626.47',
829 '71.0.3578.121',
830 '73.0.3661.1',
831 '72.0.3626.46',
832 '73.0.3661.0',
833 '72.0.3626.45',
834 '71.0.3578.120',
835 '73.0.3660.2',
836 '73.0.3660.1',
837 '73.0.3660.0',
838 '72.0.3626.44',
839 '71.0.3578.119',
840 '73.0.3659.1',
841 '73.0.3659.0',
842 '72.0.3626.43',
843 '71.0.3578.118',
844 '73.0.3658.1',
845 '73.0.3658.0',
846 '72.0.3626.42',
847 '71.0.3578.117',
848 '73.0.3657.1',
849 '73.0.3657.0',
850 '72.0.3626.41',
851 '71.0.3578.116',
852 '73.0.3656.1',
853 '73.0.3656.0',
854 '72.0.3626.40',
855 '71.0.3578.115',
856 '73.0.3655.1',
857 '73.0.3655.0',
858 '72.0.3626.39',
859 '71.0.3578.114',
860 '73.0.3654.1',
861 '73.0.3654.0',
862 '72.0.3626.38',
863 '71.0.3578.113',
864 '73.0.3653.1',
865 '73.0.3653.0',
866 '72.0.3626.37',
867 '71.0.3578.112',
868 '73.0.3652.1',
869 '73.0.3652.0',
870 '72.0.3626.36',
871 '71.0.3578.111',
872 '73.0.3651.1',
873 '73.0.3651.0',
874 '72.0.3626.35',
875 '71.0.3578.110',
876 '73.0.3650.1',
877 '73.0.3650.0',
878 '72.0.3626.34',
879 '71.0.3578.109',
880 '73.0.3649.1',
881 '73.0.3649.0',
882 '72.0.3626.33',
883 '71.0.3578.108',
884 '73.0.3648.2',
885 '73.0.3648.1',
886 '73.0.3648.0',
887 '72.0.3626.32',
888 '71.0.3578.107',
889 '73.0.3647.2',
890 '73.0.3647.1',
891 '73.0.3647.0',
892 '72.0.3626.31',
893 '71.0.3578.106',
894 '73.0.3635.3',
895 '73.0.3646.2',
896 '73.0.3646.1',
897 '73.0.3646.0',
898 '72.0.3626.30',
899 '71.0.3578.105',
900 '72.0.3626.29',
901 '73.0.3645.2',
902 '73.0.3645.1',
903 '73.0.3645.0',
904 '72.0.3626.28',
905 '71.0.3578.104',
906 '72.0.3626.27',
907 '72.0.3626.26',
908 '72.0.3626.25',
909 '72.0.3626.24',
910 '73.0.3644.0',
911 '73.0.3643.2',
912 '72.0.3626.23',
913 '71.0.3578.103',
914 '73.0.3643.1',
915 '73.0.3643.0',
916 '72.0.3626.22',
917 '71.0.3578.102',
918 '73.0.3642.1',
919 '73.0.3642.0',
920 '72.0.3626.21',
921 '71.0.3578.101',
922 '73.0.3641.1',
923 '73.0.3641.0',
924 '72.0.3626.20',
925 '71.0.3578.100',
926 '72.0.3626.19',
927 '73.0.3640.1',
928 '73.0.3640.0',
929 '72.0.3626.18',
930 '73.0.3639.1',
931 '71.0.3578.99',
932 '73.0.3639.0',
933 '72.0.3626.17',
934 '73.0.3638.2',
935 '72.0.3626.16',
936 '73.0.3638.1',
937 '73.0.3638.0',
938 '72.0.3626.15',
939 '71.0.3578.98',
940 '73.0.3635.2',
941 '71.0.3578.97',
942 '73.0.3637.1',
943 '73.0.3637.0',
944 '72.0.3626.14',
945 '71.0.3578.96',
946 '71.0.3578.95',
947 '72.0.3626.13',
948 '71.0.3578.94',
949 '73.0.3636.2',
950 '71.0.3578.93',
951 '73.0.3636.1',
952 '73.0.3636.0',
953 '72.0.3626.12',
954 '71.0.3578.92',
955 '73.0.3635.1',
956 '73.0.3635.0',
957 '72.0.3626.11',
958 '71.0.3578.91',
959 '73.0.3634.2',
960 '73.0.3634.1',
961 '73.0.3634.0',
962 '72.0.3626.10',
963 '71.0.3578.90',
964 '71.0.3578.89',
965 '73.0.3633.2',
966 '73.0.3633.1',
967 '73.0.3633.0',
968 '72.0.3610.4',
969 '72.0.3626.9',
970 '71.0.3578.88',
971 '73.0.3632.5',
972 '73.0.3632.4',
973 '73.0.3632.3',
974 '73.0.3632.2',
975 '73.0.3632.1',
976 '73.0.3632.0',
977 '72.0.3626.8',
978 '71.0.3578.87',
979 '73.0.3631.2',
980 '73.0.3631.1',
981 '73.0.3631.0',
982 '72.0.3626.7',
983 '71.0.3578.86',
984 '72.0.3626.6',
985 '73.0.3630.1',
986 '73.0.3630.0',
987 '72.0.3626.5',
988 '71.0.3578.85',
989 '72.0.3626.4',
990 '73.0.3628.3',
991 '73.0.3628.2',
992 '73.0.3629.1',
993 '73.0.3629.0',
994 '72.0.3626.3',
995 '71.0.3578.84',
996 '73.0.3628.1',
997 '73.0.3628.0',
998 '71.0.3578.83',
999 '73.0.3627.1',
1000 '73.0.3627.0',
1001 '72.0.3626.2',
1002 '71.0.3578.82',
1003 '71.0.3578.81',
1004 '71.0.3578.80',
1005 '72.0.3626.1',
1006 '72.0.3626.0',
1007 '71.0.3578.79',
1008 '70.0.3538.124',
1009 '71.0.3578.78',
1010 '72.0.3623.4',
1011 '72.0.3625.2',
1012 '72.0.3625.1',
1013 '72.0.3625.0',
1014 '71.0.3578.77',
1015 '70.0.3538.123',
1016 '72.0.3624.4',
1017 '72.0.3624.3',
1018 '72.0.3624.2',
1019 '71.0.3578.76',
1020 '72.0.3624.1',
1021 '72.0.3624.0',
1022 '72.0.3623.3',
1023 '71.0.3578.75',
1024 '70.0.3538.122',
1025 '71.0.3578.74',
1026 '72.0.3623.2',
1027 '72.0.3610.3',
1028 '72.0.3623.1',
1029 '72.0.3623.0',
1030 '72.0.3622.3',
1031 '72.0.3622.2',
1032 '71.0.3578.73',
1033 '70.0.3538.121',
1034 '72.0.3622.1',
1035 '72.0.3622.0',
1036 '71.0.3578.72',
1037 '70.0.3538.120',
1038 '72.0.3621.1',
1039 '72.0.3621.0',
1040 '71.0.3578.71',
1041 '70.0.3538.119',
1042 '72.0.3620.1',
1043 '72.0.3620.0',
1044 '71.0.3578.70',
1045 '70.0.3538.118',
1046 '71.0.3578.69',
1047 '72.0.3619.1',
1048 '72.0.3619.0',
1049 '71.0.3578.68',
1050 '70.0.3538.117',
1051 '71.0.3578.67',
1052 '72.0.3618.1',
1053 '72.0.3618.0',
1054 '71.0.3578.66',
1055 '70.0.3538.116',
1056 '72.0.3617.1',
1057 '72.0.3617.0',
1058 '71.0.3578.65',
1059 '70.0.3538.115',
1060 '72.0.3602.3',
1061 '71.0.3578.64',
1062 '72.0.3616.1',
1063 '72.0.3616.0',
1064 '71.0.3578.63',
1065 '70.0.3538.114',
1066 '71.0.3578.62',
1067 '72.0.3615.1',
1068 '72.0.3615.0',
1069 '71.0.3578.61',
1070 '70.0.3538.113',
1071 '72.0.3614.1',
1072 '72.0.3614.0',
1073 '71.0.3578.60',
1074 '70.0.3538.112',
1075 '72.0.3613.1',
1076 '72.0.3613.0',
1077 '71.0.3578.59',
1078 '70.0.3538.111',
1079 '72.0.3612.2',
1080 '72.0.3612.1',
1081 '72.0.3612.0',
1082 '70.0.3538.110',
1083 '71.0.3578.58',
1084 '70.0.3538.109',
1085 '72.0.3611.2',
1086 '72.0.3611.1',
1087 '72.0.3611.0',
1088 '71.0.3578.57',
1089 '70.0.3538.108',
1090 '72.0.3610.2',
1091 '71.0.3578.56',
1092 '71.0.3578.55',
1093 '72.0.3610.1',
1094 '72.0.3610.0',
1095 '71.0.3578.54',
1096 '70.0.3538.107',
1097 '71.0.3578.53',
1098 '72.0.3609.3',
1099 '71.0.3578.52',
1100 '72.0.3609.2',
1101 '71.0.3578.51',
1102 '72.0.3608.5',
1103 '72.0.3609.1',
1104 '72.0.3609.0',
1105 '71.0.3578.50',
1106 '70.0.3538.106',
1107 '72.0.3608.4',
1108 '72.0.3608.3',
1109 '72.0.3608.2',
1110 '71.0.3578.49',
1111 '72.0.3608.1',
1112 '72.0.3608.0',
1113 '70.0.3538.105',
1114 '71.0.3578.48',
1115 '72.0.3607.1',
1116 '72.0.3607.0',
1117 '71.0.3578.47',
1118 '70.0.3538.104',
1119 '72.0.3606.2',
1120 '72.0.3606.1',
1121 '72.0.3606.0',
1122 '71.0.3578.46',
1123 '70.0.3538.103',
1124 '70.0.3538.102',
1125 '72.0.3605.3',
1126 '72.0.3605.2',
1127 '72.0.3605.1',
1128 '72.0.3605.0',
1129 '71.0.3578.45',
1130 '70.0.3538.101',
1131 '71.0.3578.44',
1132 '71.0.3578.43',
1133 '70.0.3538.100',
1134 '70.0.3538.99',
1135 '71.0.3578.42',
1136 '72.0.3604.1',
1137 '72.0.3604.0',
1138 '71.0.3578.41',
1139 '70.0.3538.98',
1140 '71.0.3578.40',
1141 '72.0.3603.2',
1142 '72.0.3603.1',
1143 '72.0.3603.0',
1144 '71.0.3578.39',
1145 '70.0.3538.97',
1146 '72.0.3602.2',
1147 '71.0.3578.38',
1148 '71.0.3578.37',
1149 '72.0.3602.1',
1150 '72.0.3602.0',
1151 '71.0.3578.36',
1152 '70.0.3538.96',
1153 '72.0.3601.1',
1154 '72.0.3601.0',
1155 '71.0.3578.35',
1156 '70.0.3538.95',
1157 '72.0.3600.1',
1158 '72.0.3600.0',
1159 '71.0.3578.34',
1160 '70.0.3538.94',
1161 '72.0.3599.3',
1162 '72.0.3599.2',
1163 '72.0.3599.1',
1164 '72.0.3599.0',
1165 '71.0.3578.33',
1166 '70.0.3538.93',
1167 '72.0.3598.1',
1168 '72.0.3598.0',
1169 '71.0.3578.32',
1170 '70.0.3538.87',
1171 '72.0.3597.1',
1172 '72.0.3597.0',
1173 '72.0.3596.2',
1174 '71.0.3578.31',
1175 '70.0.3538.86',
1176 '71.0.3578.30',
1177 '71.0.3578.29',
1178 '72.0.3596.1',
1179 '72.0.3596.0',
1180 '71.0.3578.28',
1181 '70.0.3538.85',
1182 '72.0.3595.2',
1183 '72.0.3591.3',
1184 '72.0.3595.1',
1185 '72.0.3595.0',
1186 '71.0.3578.27',
1187 '70.0.3538.84',
1188 '72.0.3594.1',
1189 '72.0.3594.0',
1190 '71.0.3578.26',
1191 '70.0.3538.83',
1192 '72.0.3593.2',
1193 '72.0.3593.1',
1194 '72.0.3593.0',
1195 '71.0.3578.25',
1196 '70.0.3538.82',
1197 '72.0.3589.3',
1198 '72.0.3592.2',
1199 '72.0.3592.1',
1200 '72.0.3592.0',
1201 '71.0.3578.24',
1202 '72.0.3589.2',
1203 '70.0.3538.81',
1204 '70.0.3538.80',
1205 '72.0.3591.2',
1206 '72.0.3591.1',
1207 '72.0.3591.0',
1208 '71.0.3578.23',
1209 '70.0.3538.79',
1210 '71.0.3578.22',
1211 '72.0.3590.1',
1212 '72.0.3590.0',
1213 '71.0.3578.21',
1214 '70.0.3538.78',
1215 '70.0.3538.77',
1216 '72.0.3589.1',
1217 '72.0.3589.0',
1218 '71.0.3578.20',
1219 '70.0.3538.76',
1220 '71.0.3578.19',
1221 '70.0.3538.75',
1222 '72.0.3588.1',
1223 '72.0.3588.0',
1224 '71.0.3578.18',
1225 '70.0.3538.74',
1226 '72.0.3586.2',
1227 '72.0.3587.0',
1228 '71.0.3578.17',
1229 '70.0.3538.73',
1230 '72.0.3586.1',
1231 '72.0.3586.0',
1232 '71.0.3578.16',
1233 '70.0.3538.72',
1234 '72.0.3585.1',
1235 '72.0.3585.0',
1236 '71.0.3578.15',
1237 '70.0.3538.71',
1238 '71.0.3578.14',
1239 '72.0.3584.1',
1240 '72.0.3584.0',
1241 '71.0.3578.13',
1242 '70.0.3538.70',
1243 '72.0.3583.2',
1244 '71.0.3578.12',
1245 '72.0.3583.1',
1246 '72.0.3583.0',
1247 '71.0.3578.11',
1248 '70.0.3538.69',
1249 '71.0.3578.10',
1250 '72.0.3582.0',
1251 '72.0.3581.4',
1252 '71.0.3578.9',
1253 '70.0.3538.67',
1254 '72.0.3581.3',
1255 '72.0.3581.2',
1256 '72.0.3581.1',
1257 '72.0.3581.0',
1258 '71.0.3578.8',
1259 '70.0.3538.66',
1260 '72.0.3580.1',
1261 '72.0.3580.0',
1262 '71.0.3578.7',
1263 '70.0.3538.65',
1264 '71.0.3578.6',
1265 '72.0.3579.1',
1266 '72.0.3579.0',
1267 '71.0.3578.5',
1268 '70.0.3538.64',
1269 '71.0.3578.4',
1270 '71.0.3578.3',
1271 '71.0.3578.2',
1272 '71.0.3578.1',
1273 '71.0.3578.0',
1274 '70.0.3538.63',
1275 '69.0.3497.128',
1276 '70.0.3538.62',
1277 '70.0.3538.61',
1278 '70.0.3538.60',
1279 '70.0.3538.59',
1280 '71.0.3577.1',
1281 '71.0.3577.0',
1282 '70.0.3538.58',
1283 '69.0.3497.127',
1284 '71.0.3576.2',
1285 '71.0.3576.1',
1286 '71.0.3576.0',
1287 '70.0.3538.57',
1288 '70.0.3538.56',
1289 '71.0.3575.2',
1290 '70.0.3538.55',
1291 '69.0.3497.126',
1292 '70.0.3538.54',
1293 '71.0.3575.1',
1294 '71.0.3575.0',
1295 '71.0.3574.1',
1296 '71.0.3574.0',
1297 '70.0.3538.53',
1298 '69.0.3497.125',
1299 '70.0.3538.52',
1300 '71.0.3573.1',
1301 '71.0.3573.0',
1302 '70.0.3538.51',
1303 '69.0.3497.124',
1304 '71.0.3572.1',
1305 '71.0.3572.0',
1306 '70.0.3538.50',
1307 '69.0.3497.123',
1308 '71.0.3571.2',
1309 '70.0.3538.49',
1310 '69.0.3497.122',
1311 '71.0.3571.1',
1312 '71.0.3571.0',
1313 '70.0.3538.48',
1314 '69.0.3497.121',
1315 '71.0.3570.1',
1316 '71.0.3570.0',
1317 '70.0.3538.47',
1318 '69.0.3497.120',
1319 '71.0.3568.2',
1320 '71.0.3569.1',
1321 '71.0.3569.0',
1322 '70.0.3538.46',
1323 '69.0.3497.119',
1324 '70.0.3538.45',
1325 '71.0.3568.1',
1326 '71.0.3568.0',
1327 '70.0.3538.44',
1328 '69.0.3497.118',
1329 '70.0.3538.43',
1330 '70.0.3538.42',
1331 '71.0.3567.1',
1332 '71.0.3567.0',
1333 '70.0.3538.41',
1334 '69.0.3497.117',
1335 '71.0.3566.1',
1336 '71.0.3566.0',
1337 '70.0.3538.40',
1338 '69.0.3497.116',
1339 '71.0.3565.1',
1340 '71.0.3565.0',
1341 '70.0.3538.39',
1342 '69.0.3497.115',
1343 '71.0.3564.1',
1344 '71.0.3564.0',
1345 '70.0.3538.38',
1346 '69.0.3497.114',
1347 '71.0.3563.0',
1348 '71.0.3562.2',
1349 '70.0.3538.37',
1350 '69.0.3497.113',
1351 '70.0.3538.36',
1352 '70.0.3538.35',
1353 '71.0.3562.1',
1354 '71.0.3562.0',
1355 '70.0.3538.34',
1356 '69.0.3497.112',
1357 '70.0.3538.33',
1358 '71.0.3561.1',
1359 '71.0.3561.0',
1360 '70.0.3538.32',
1361 '69.0.3497.111',
1362 '71.0.3559.6',
1363 '71.0.3560.1',
1364 '71.0.3560.0',
1365 '71.0.3559.5',
1366 '71.0.3559.4',
1367 '70.0.3538.31',
1368 '69.0.3497.110',
1369 '71.0.3559.3',
1370 '70.0.3538.30',
1371 '69.0.3497.109',
1372 '71.0.3559.2',
1373 '71.0.3559.1',
1374 '71.0.3559.0',
1375 '70.0.3538.29',
1376 '69.0.3497.108',
1377 '71.0.3558.2',
1378 '71.0.3558.1',
1379 '71.0.3558.0',
1380 '70.0.3538.28',
1381 '69.0.3497.107',
1382 '71.0.3557.2',
1383 '71.0.3557.1',
1384 '71.0.3557.0',
1385 '70.0.3538.27',
1386 '69.0.3497.106',
1387 '71.0.3554.4',
1388 '70.0.3538.26',
1389 '71.0.3556.1',
1390 '71.0.3556.0',
1391 '70.0.3538.25',
1392 '71.0.3554.3',
1393 '69.0.3497.105',
1394 '71.0.3554.2',
1395 '70.0.3538.24',
1396 '69.0.3497.104',
1397 '71.0.3555.2',
1398 '70.0.3538.23',
1399 '71.0.3555.1',
1400 '71.0.3555.0',
1401 '70.0.3538.22',
1402 '69.0.3497.103',
1403 '71.0.3554.1',
1404 '71.0.3554.0',
1405 '70.0.3538.21',
1406 '69.0.3497.102',
1407 '71.0.3553.3',
1408 '70.0.3538.20',
1409 '69.0.3497.101',
1410 '71.0.3553.2',
1411 '69.0.3497.100',
1412 '71.0.3553.1',
1413 '71.0.3553.0',
1414 '70.0.3538.19',
1415 '69.0.3497.99',
1416 '69.0.3497.98',
1417 '69.0.3497.97',
1418 '71.0.3552.6',
1419 '71.0.3552.5',
1420 '71.0.3552.4',
1421 '71.0.3552.3',
1422 '71.0.3552.2',
1423 '71.0.3552.1',
1424 '71.0.3552.0',
1425 '70.0.3538.18',
1426 '69.0.3497.96',
1427 '71.0.3551.3',
1428 '71.0.3551.2',
1429 '71.0.3551.1',
1430 '71.0.3551.0',
1431 '70.0.3538.17',
1432 '69.0.3497.95',
1433 '71.0.3550.3',
1434 '71.0.3550.2',
1435 '71.0.3550.1',
1436 '71.0.3550.0',
1437 '70.0.3538.16',
1438 '69.0.3497.94',
1439 '71.0.3549.1',
1440 '71.0.3549.0',
1441 '70.0.3538.15',
1442 '69.0.3497.93',
1443 '69.0.3497.92',
1444 '71.0.3548.1',
1445 '71.0.3548.0',
1446 '70.0.3538.14',
1447 '69.0.3497.91',
1448 '71.0.3547.1',
1449 '71.0.3547.0',
1450 '70.0.3538.13',
1451 '69.0.3497.90',
1452 '71.0.3546.2',
1453 '69.0.3497.89',
1454 '71.0.3546.1',
1455 '71.0.3546.0',
1456 '70.0.3538.12',
1457 '69.0.3497.88',
1458 '71.0.3545.4',
1459 '71.0.3545.3',
1460 '71.0.3545.2',
1461 '71.0.3545.1',
1462 '71.0.3545.0',
1463 '70.0.3538.11',
1464 '69.0.3497.87',
1465 '71.0.3544.5',
1466 '71.0.3544.4',
1467 '71.0.3544.3',
1468 '71.0.3544.2',
1469 '71.0.3544.1',
1470 '71.0.3544.0',
1471 '69.0.3497.86',
1472 '70.0.3538.10',
1473 '69.0.3497.85',
1474 '70.0.3538.9',
1475 '69.0.3497.84',
1476 '71.0.3543.4',
1477 '70.0.3538.8',
1478 '71.0.3543.3',
1479 '71.0.3543.2',
1480 '71.0.3543.1',
1481 '71.0.3543.0',
1482 '70.0.3538.7',
1483 '69.0.3497.83',
1484 '71.0.3542.2',
1485 '71.0.3542.1',
1486 '71.0.3542.0',
1487 '70.0.3538.6',
1488 '69.0.3497.82',
1489 '69.0.3497.81',
1490 '71.0.3541.1',
1491 '71.0.3541.0',
1492 '70.0.3538.5',
1493 '69.0.3497.80',
1494 '71.0.3540.1',
1495 '71.0.3540.0',
1496 '70.0.3538.4',
1497 '69.0.3497.79',
1498 '70.0.3538.3',
1499 '71.0.3539.1',
1500 '71.0.3539.0',
1501 '69.0.3497.78',
1502 '68.0.3440.134',
1503 '69.0.3497.77',
1504 '70.0.3538.2',
1505 '70.0.3538.1',
1506 '70.0.3538.0',
1507 '69.0.3497.76',
1508 '68.0.3440.133',
1509 '69.0.3497.75',
1510 '70.0.3537.2',
1511 '70.0.3537.1',
1512 '70.0.3537.0',
1513 '69.0.3497.74',
1514 '68.0.3440.132',
1515 '70.0.3536.0',
1516 '70.0.3535.5',
1517 '70.0.3535.4',
1518 '70.0.3535.3',
1519 '69.0.3497.73',
1520 '68.0.3440.131',
1521 '70.0.3532.8',
1522 '70.0.3532.7',
1523 '69.0.3497.72',
1524 '69.0.3497.71',
1525 '70.0.3535.2',
1526 '70.0.3535.1',
1527 '70.0.3535.0',
1528 '69.0.3497.70',
1529 '68.0.3440.130',
1530 '69.0.3497.69',
1531 '68.0.3440.129',
1532 '70.0.3534.4',
1533 '70.0.3534.3',
1534 '70.0.3534.2',
1535 '70.0.3534.1',
1536 '70.0.3534.0',
1537 '69.0.3497.68',
1538 '68.0.3440.128',
1539 '70.0.3533.2',
1540 '70.0.3533.1',
1541 '70.0.3533.0',
1542 '69.0.3497.67',
1543 '68.0.3440.127',
1544 '70.0.3532.6',
1545 '70.0.3532.5',
1546 '70.0.3532.4',
1547 '69.0.3497.66',
1548 '68.0.3440.126',
1549 '70.0.3532.3',
1550 '70.0.3532.2',
1551 '70.0.3532.1',
1552 '69.0.3497.60',
1553 '69.0.3497.65',
1554 '69.0.3497.64',
1555 '70.0.3532.0',
1556 '70.0.3531.0',
1557 '70.0.3530.4',
1558 '70.0.3530.3',
1559 '70.0.3530.2',
1560 '69.0.3497.58',
1561 '68.0.3440.125',
1562 '69.0.3497.57',
1563 '69.0.3497.56',
1564 '69.0.3497.55',
1565 '69.0.3497.54',
1566 '70.0.3530.1',
1567 '70.0.3530.0',
1568 '69.0.3497.53',
1569 '68.0.3440.124',
1570 '69.0.3497.52',
1571 '70.0.3529.3',
1572 '70.0.3529.2',
1573 '70.0.3529.1',
1574 '70.0.3529.0',
1575 '69.0.3497.51',
1576 '70.0.3528.4',
1577 '68.0.3440.123',
1578 '70.0.3528.3',
1579 '70.0.3528.2',
1580 '70.0.3528.1',
1581 '70.0.3528.0',
1582 '69.0.3497.50',
1583 '68.0.3440.122',
1584 '70.0.3527.1',
1585 '70.0.3527.0',
1586 '69.0.3497.49',
1587 '68.0.3440.121',
1588 '70.0.3526.1',
1589 '70.0.3526.0',
1590 '68.0.3440.120',
1591 '69.0.3497.48',
1592 '69.0.3497.47',
1593 '68.0.3440.119',
1594 '68.0.3440.118',
1595 '70.0.3525.5',
1596 '70.0.3525.4',
1597 '70.0.3525.3',
1598 '68.0.3440.117',
1599 '69.0.3497.46',
1600 '70.0.3525.2',
1601 '70.0.3525.1',
1602 '70.0.3525.0',
1603 '69.0.3497.45',
1604 '68.0.3440.116',
1605 '70.0.3524.4',
1606 '70.0.3524.3',
1607 '69.0.3497.44',
1608 '70.0.3524.2',
1609 '70.0.3524.1',
1610 '70.0.3524.0',
1611 '70.0.3523.2',
1612 '69.0.3497.43',
1613 '68.0.3440.115',
1614 '70.0.3505.9',
1615 '69.0.3497.42',
1616 '70.0.3505.8',
1617 '70.0.3523.1',
1618 '70.0.3523.0',
1619 '69.0.3497.41',
1620 '68.0.3440.114',
1621 '70.0.3505.7',
1622 '69.0.3497.40',
1623 '70.0.3522.1',
1624 '70.0.3522.0',
1625 '70.0.3521.2',
1626 '69.0.3497.39',
1627 '68.0.3440.113',
1628 '70.0.3505.6',
1629 '70.0.3521.1',
1630 '70.0.3521.0',
1631 '69.0.3497.38',
1632 '68.0.3440.112',
1633 '70.0.3520.1',
1634 '70.0.3520.0',
1635 '69.0.3497.37',
1636 '68.0.3440.111',
1637 '70.0.3519.3',
1638 '70.0.3519.2',
1639 '70.0.3519.1',
1640 '70.0.3519.0',
1641 '69.0.3497.36',
1642 '68.0.3440.110',
1643 '70.0.3518.1',
1644 '70.0.3518.0',
1645 '69.0.3497.35',
1646 '69.0.3497.34',
1647 '68.0.3440.109',
1648 '70.0.3517.1',
1649 '70.0.3517.0',
1650 '69.0.3497.33',
1651 '68.0.3440.108',
1652 '69.0.3497.32',
1653 '70.0.3516.3',
1654 '70.0.3516.2',
1655 '70.0.3516.1',
1656 '70.0.3516.0',
1657 '69.0.3497.31',
1658 '68.0.3440.107',
1659 '70.0.3515.4',
1660 '68.0.3440.106',
1661 '70.0.3515.3',
1662 '70.0.3515.2',
1663 '70.0.3515.1',
1664 '70.0.3515.0',
1665 '69.0.3497.30',
1666 '68.0.3440.105',
1667 '68.0.3440.104',
1668 '70.0.3514.2',
1669 '70.0.3514.1',
1670 '70.0.3514.0',
1671 '69.0.3497.29',
1672 '68.0.3440.103',
1673 '70.0.3513.1',
1674 '70.0.3513.0',
1675 '69.0.3497.28',
1676 )
1677 return _USER_AGENT_TPL % random.choice(_CHROME_VERSIONS)
1678
1679
3e669f36 1680std_headers = {
f7a147e3 1681 'User-Agent': random_user_agent(),
1682 'Accept-Charset': 'ISO-8859-1,utf-8;q=0.7,*;q=0.7',
1683 'Accept': 'text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8',
1684 'Accept-Encoding': 'gzip, deflate',
1685 'Accept-Language': 'en-us,en;q=0.5',
3e669f36 1686}
f427df17 1687
5f6a1245 1688
1689USER_AGENTS = {
1690 'Safari': 'Mozilla/5.0 (X11; Linux x86_64; rv:10.0) AppleWebKit/533.20.25 (KHTML, like Gecko) Version/5.0.4 Safari/533.20.27',
1691}
1692
1693
1694NO_DEFAULT = object()
1695
1696ENGLISH_MONTH_NAMES = [
1697 'January', 'February', 'March', 'April', 'May', 'June',
1698 'July', 'August', 'September', 'October', 'November', 'December']
1699
1700MONTH_NAMES = {
1701 'en': ENGLISH_MONTH_NAMES,
1702 'fr': [
1703 'janvier', 'février', 'mars', 'avril', 'mai', 'juin',
1704 'juillet', 'août', 'septembre', 'octobre', 'novembre', 'décembre'],
f6717dec 1705}
a942d6cb 1706
1707KNOWN_EXTENSIONS = (
1708 'mp4', 'm4a', 'm4p', 'm4b', 'm4r', 'm4v', 'aac',
1709 'flv', 'f4v', 'f4a', 'f4b',
1710 'webm', 'ogg', 'ogv', 'oga', 'ogx', 'spx', 'opus',
1711 'mkv', 'mka', 'mk3d',
1712 'avi', 'divx',
1713 'mov',
1714 'asf', 'wmv', 'wma',
1715 '3gp', '3g2',
1716 'mp3',
1717 'flac',
1718 'ape',
1719 'wav',
1720 'f4f', 'f4m', 'm3u8', 'smil')
1721
c587cbb7 1722# needed for sanitizing filenames in restricted mode
c8827027 1723ACCENT_CHARS = dict(zip('ÂÃÄÀÁÅÆÇÈÉÊËÌÍÎÏÐÑÒÓÔÕÖŐØŒÙÚÛÜŰÝÞßàáâãäåæçèéêëìíîïðñòóôõöőøœùúûüűýþÿ',
1724 itertools.chain('AAAAAA', ['AE'], 'CEEEEIIIIDNOOOOOOO', ['OE'], 'UUUUUY', ['TH', 'ss'],
1725 'aaaaaa', ['ae'], 'ceeeeiiiionooooooo', ['oe'], 'uuuuuy', ['th'], 'y')))
c587cbb7 1726
1727DATE_FORMATS = (
1728 '%d %B %Y',
1729 '%d %b %Y',
1730 '%B %d %Y',
1731 '%B %dst %Y',
1732 '%B %dnd %Y',
9d30c213 1733 '%B %drd %Y',
cb655f34 1734 '%B %dth %Y',
46f59e89 1735 '%b %d %Y',
1736 '%b %dst %Y',
1737 '%b %dnd %Y',
9d30c213 1738 '%b %drd %Y',
cb655f34 1739 '%b %dth %Y',
1740 '%b %dst %Y %I:%M',
1741 '%b %dnd %Y %I:%M',
9d30c213 1742 '%b %drd %Y %I:%M',
1743 '%b %dth %Y %I:%M',
1744 '%Y %m %d',
1745 '%Y-%m-%d',
bccdbd22 1746 '%Y.%m.%d.',
46f59e89 1747 '%Y/%m/%d',
81c13222 1748 '%Y/%m/%d %H:%M',
46f59e89 1749 '%Y/%m/%d %H:%M:%S',
1750 '%Y%m%d%H%M',
1751 '%Y%m%d%H%M%S',
0c1c6f4b 1752 '%Y-%m-%d %H:%M',
1753 '%Y-%m-%d %H:%M:%S',
1754 '%Y-%m-%d %H:%M:%S.%f',
5014558a 1755 '%Y-%m-%d %H:%M:%S:%f',
1756 '%d.%m.%Y %H:%M',
1757 '%d.%m.%Y %H.%M',
1758 '%Y-%m-%dT%H:%M:%SZ',
1759 '%Y-%m-%dT%H:%M:%S.%fZ',
1760 '%Y-%m-%dT%H:%M:%S.%f0Z',
1761 '%Y-%m-%dT%H:%M:%S',
1762 '%Y-%m-%dT%H:%M:%S.%f',
1763 '%Y-%m-%dT%H:%M',
1764 '%b %d %Y at %H:%M',
1765 '%b %d %Y at %H:%M:%S',
1766 '%B %d %Y at %H:%M',
1767 '%B %d %Y at %H:%M:%S',
a63d9bd0 1768 '%H:%M %d-%b-%Y',
1769)
1770
1771DATE_FORMATS_DAY_FIRST = list(DATE_FORMATS)
1772DATE_FORMATS_DAY_FIRST.extend([
1773 '%d-%m-%Y',
1774 '%d.%m.%Y',
1775 '%d.%m.%y',
1776 '%d/%m/%Y',
1777 '%d/%m/%y',
1778 '%d/%m/%Y %H:%M:%S',
1779])
1780
1781DATE_FORMATS_MONTH_FIRST = list(DATE_FORMATS)
1782DATE_FORMATS_MONTH_FIRST.extend([
1783 '%m-%d-%Y',
1784 '%m.%d.%Y',
1785 '%m/%d/%Y',
1786 '%m/%d/%y',
1787 '%m/%d/%Y %H:%M:%S',
1788])
1789
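# --- Illustrative sketch (not part of the original module) ---------------------
# How format tables like the ones above are typically consumed: try each pattern
# with strptime() until one fits. The helper name below is made up for the example;
# e.g. '25.12.2021 18:30' ends up matching '%d.%m.%Y %H:%M'.
def _demo_parse_date(date_str):
    for fmt in DATE_FORMATS_DAY_FIRST:
        try:
            return datetime.datetime.strptime(date_str, fmt)
        except ValueError:
            continue
    return None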
06b3fe29 1790PACKED_CODES_RE = r"}\('(.+)',(\d+),(\d+),'([^']+)'\.split\('\|'\)"
22f5f5c6 1791JSON_LD_RE = r'(?is)<script[^>]+type=(["\']?)application/ld\+json\1[^>]*>(?P<json_ld>.+?)</script>'
06b3fe29 1792
7105440c 1793
d77c3dfd 1794def preferredencoding():
59ae15a5 1795 """Get preferred encoding.
d77c3dfd 1796
1797 Returns the best encoding scheme for the system, based on
1798 locale.getpreferredencoding() and some further tweaks.
1799 """
1800 try:
1801 pref = locale.getpreferredencoding()
28e614de 1802 'TEST'.encode(pref)
70a1165b 1803 except Exception:
59ae15a5 1804 pref = 'UTF-8'
bae611f2 1805
59ae15a5 1806 return pref
d77c3dfd 1807
f4bfd65f 1808
181c8655 1809def write_json_file(obj, fn):
1394646a 1810 """ Encode obj as JSON and write it to fn, atomically if possible """
181c8655 1811
92120217 1812 fn = encodeFilename(fn)
61ee5aeb 1813 if sys.version_info < (3, 0) and sys.platform != 'win32':
1814 encoding = get_filesystem_encoding()
1815 # os.path.basename returns a bytes object, but NamedTemporaryFile
1816 # will fail if the filename contains non-ASCII characters unless we
1817 # use a unicode object
1818 path_basename = lambda f: os.path.basename(fn).decode(encoding)
1819 # the same for os.path.dirname
1820 path_dirname = lambda f: os.path.dirname(fn).decode(encoding)
1821 else:
1822 path_basename = os.path.basename
1823 path_dirname = os.path.dirname
1824
1825 args = {
1826 'suffix': '.tmp',
1827 'prefix': path_basename(fn) + '.',
1828 'dir': path_dirname(fn),
1829 'delete': False,
1830 }
1831
1832 # In Python 2.x, json.dump expects a bytestream.
1833 # In Python 3.x, it writes to a character stream
1834 if sys.version_info < (3, 0):
73159f99 1835 args['mode'] = 'wb'
181c8655 1836 else:
1837 args.update({
1838 'mode': 'w',
1839 'encoding': 'utf-8',
1840 })
1841
c86b6142 1842 tf = tempfile.NamedTemporaryFile(**compat_kwargs(args))
1843
1844 try:
1845 with tf:
6e84b215 1846 json.dump(obj, tf)
1847 if sys.platform == 'win32':
1848 # Need to remove existing file on Windows, else os.rename raises
1849 # WindowsError or FileExistsError.
1850 try:
1851 os.unlink(fn)
1852 except OSError:
1853 pass
1854 try:
1855 mask = os.umask(0)
1856 os.umask(mask)
1857 os.chmod(tf.name, 0o666 & ~mask)
1858 except OSError:
1859 pass
181c8655 1860 os.rename(tf.name, fn)
70a1165b 1861 except Exception:
1862 try:
1863 os.remove(tf.name)
1864 except OSError:
1865 pass
1866 raise
1867
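# --- Illustrative usage sketch (not part of the original module) ----------------
# write_json_file() writes to a NamedTemporaryFile in the target directory and then
# os.rename()s it over the destination, so a crash mid-write cannot leave a
# truncated JSON file behind. A minimal call looks like this (filename is made up):
def _demo_write_json_file():
    write_json_file({'id': 'abc123', 'title': 'example'}, 'example.info.json')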
1868
1869if sys.version_info >= (2, 7):
ee114368 1870 def find_xpath_attr(node, xpath, key, val=None):
59ae56fa 1871 """ Find the xpath xpath[@key=val] """
5d2354f1 1872 assert re.match(r'^[a-zA-Z_-]+$', key)
ee114368 1873 expr = xpath + ('[@%s]' % key if val is None else "[@%s='%s']" % (key, val))
1874 return node.find(expr)
1875else:
ee114368 1876 def find_xpath_attr(node, xpath, key, val=None):
810c10ba 1877 for f in node.findall(compat_xpath(xpath)):
1878 if key not in f.attrib:
1879 continue
1880 if val is None or f.attrib.get(key) == val:
1881 return f
1882 return None
1883
1884# On Python 2.6, the xml.etree.ElementTree.Element methods don't support
1885# the namespace parameter
1886
1887
1888def xpath_with_ns(path, ns_map):
1889 components = [c.split(':') for c in path.split('/')]
1890 replaced = []
1891 for c in components:
1892 if len(c) == 1:
1893 replaced.append(c[0])
1894 else:
1895 ns, tag = c
1896 replaced.append('{%s}%s' % (ns_map[ns], tag))
1897 return '/'.join(replaced)
1898
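# --- Illustrative sketch (not part of the original module) ---------------------
# xpath_with_ns() expands 'prefix:tag' steps into the '{uri}tag' form that
# ElementTree expects. The namespace map below is an assumed example.
def _demo_xpath_with_ns():
    NS_MAP = {'media': 'http://search.yahoo.com/mrss/'}
    # 'media:content' becomes '{http://search.yahoo.com/mrss/}content'
    return xpath_with_ns('media:content/media:thumbnail', NS_MAP)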
d77c3dfd 1899
a41fb80c 1900def xpath_element(node, xpath, name=None, fatal=False, default=NO_DEFAULT):
578c0745 1901 def _find_xpath(xpath):
810c10ba 1902 return node.find(compat_xpath(xpath))
1903
1904 if isinstance(xpath, (str, compat_str)):
1905 n = _find_xpath(xpath)
1906 else:
1907 for xp in xpath:
1908 n = _find_xpath(xp)
1909 if n is not None:
1910 break
d74bebd5 1911
8e636da4 1912 if n is None:
1913 if default is not NO_DEFAULT:
1914 return default
1915 elif fatal:
1916 name = xpath if name is None else name
1917 raise ExtractorError('Could not find XML element %s' % name)
1918 else:
1919 return None
1920 return n
1921
1922
1923def xpath_text(node, xpath, name=None, fatal=False, default=NO_DEFAULT):
1924 n = xpath_element(node, xpath, name, fatal=fatal, default=default)
1925 if n is None or n == default:
1926 return n
1927 if n.text is None:
1928 if default is not NO_DEFAULT:
1929 return default
1930 elif fatal:
1931 name = xpath if name is None else name
1932 raise ExtractorError('Could not find XML element\'s text %s' % name)
1933 else:
1934 return None
1935 return n.text
1936
1937
1938def xpath_attr(node, xpath, key, name=None, fatal=False, default=NO_DEFAULT):
1939 n = find_xpath_attr(node, xpath, key)
1940 if n is None:
1941 if default is not NO_DEFAULT:
1942 return default
1943 elif fatal:
1944 name = '%s[@%s]' % (xpath, key) if name is None else name
1945 raise ExtractorError('Could not find XML attribute %s' % name)
1946 else:
1947 return None
1948 return n.attrib[key]
1949
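# --- Illustrative sketch (not part of the original module) ---------------------
# Typical combined use of the xpath_* helpers above on a parsed XML document; the
# element and attribute names are made up.
def _demo_xpath_helpers(doc):
    title = xpath_text(doc, './/title', 'title', fatal=True)          # raises if missing
    thumbnail = xpath_attr(doc, './/thumbnail', 'url', default=None)  # None if missing
    return title, thumbnail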
1950
9e6dd238 1951def get_element_by_id(id, html):
43e8fafd 1952 """Return the content of the tag with the specified ID in the passed HTML document"""
611c1dd9 1953 return get_element_by_attribute('id', id, html)
43e8fafd 1954
12ea2f30 1955
84c237fb 1956def get_element_by_class(class_name, html):
1957 """Return the content of the first tag with the specified class in the passed HTML document"""
1958 retval = get_elements_by_class(class_name, html)
1959 return retval[0] if retval else None
1960
1961
1962def get_element_by_attribute(attribute, value, html, escape_value=True):
1963 retval = get_elements_by_attribute(attribute, value, html, escape_value)
1964 return retval[0] if retval else None
1965
1966
1967def get_elements_by_class(class_name, html):
1968 """Return the content of all tags with the specified class in the passed HTML document as a list"""
1969 return get_elements_by_attribute(
1970 'class', r'[^\'"]*\b%s\b[^\'"]*' % re.escape(class_name),
1971 html, escape_value=False)
1972
1973
2af12ad9 1974def get_elements_by_attribute(attribute, value, html, escape_value=True):
43e8fafd 1975 """Return the content of all tags with the specified attribute in the passed HTML document as a list"""
9e6dd238 1976
1977 value = re.escape(value) if escape_value else value
1978
1979 retlist = []
1980 for m in re.finditer(r'''(?xs)
38285056 1981 <([a-zA-Z0-9:._-]+)
609ff8ca 1982 (?:\s+[a-zA-Z0-9:._-]+(?:=[a-zA-Z0-9:._-]*|="[^"]*"|='[^']*'|))*?
38285056 1983 \s+%s=['"]?%s['"]?
609ff8ca 1984 (?:\s+[a-zA-Z0-9:._-]+(?:=[a-zA-Z0-9:._-]*|="[^"]*"|='[^']*'|))*?
1985 \s*>
1986 (?P<content>.*?)
1987 </\1>
1988 ''' % (re.escape(attribute), value), html):
1989 res = m.group('content')
38285056 1990
1991 if res.startswith('"') or res.startswith("'"):
1992 res = res[1:-1]
38285056 1993
2af12ad9 1994 retlist.append(unescapeHTML(res))
a921f407 1995
2af12ad9 1996 return retlist
a921f407 1997
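# --- Illustrative sketch (not part of the original module) ---------------------
# The class/attribute helpers above are purely regex-based, so they work on a raw
# HTML string without building a DOM. The markup below is made up.
def _demo_get_elements_by_class():
    html = '<div class="title main">Hello</div><span class="title">World</span>'
    return get_elements_by_class('title', html)  # expected: ['Hello', 'World']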
c5229f39 1998
1999class HTMLAttributeParser(compat_HTMLParser):
2000 """Trivial HTML parser to gather the attributes for a single element"""
b6e0c7d2 2001
8bb56eee 2002 def __init__(self):
c5229f39 2003 self.attrs = {}
2004 compat_HTMLParser.__init__(self)
2005
2006 def handle_starttag(self, tag, attrs):
2007 self.attrs = dict(attrs)
2008
c5229f39 2009
2010class HTMLListAttrsParser(compat_HTMLParser):
2011 """HTML parser to gather the attributes for the elements of a list"""
2012
2013 def __init__(self):
2014 compat_HTMLParser.__init__(self)
2015 self.items = []
2016 self._level = 0
2017
2018 def handle_starttag(self, tag, attrs):
2019 if tag == 'li' and self._level == 0:
2020 self.items.append(dict(attrs))
2021 self._level += 1
2022
2023 def handle_endtag(self, tag):
2024 self._level -= 1
2025
2026
2027def extract_attributes(html_element):
2028 """Given a string for an HTML element such as
2029 <el
2030 a="foo" B="bar" c="&98;az" d=boz
2031 empty= noval entity="&amp;"
2032 sq='"' dq="'"
2033 >
2034 Decode and return a dictionary of attributes.
2035 {
2036 'a': 'foo', 'b': 'bar', c: 'baz', d: 'boz',
2037 'empty': '', 'noval': None, 'entity': '&',
2038 'sq': '"', 'dq': '\''
2039 }.
2040 NB HTMLParser is stricter in Python 2.6 & 3.2 than in later versions,
2041 but the cases in the unit test will work for all of 2.6, 2.7, 3.2-3.5.
2042 """
2043 parser = HTMLAttributeParser()
2044 try:
2045 parser.feed(html_element)
2046 parser.close()
2047 # Older Python may throw HTMLParseError in case of malformed HTML
2048 except compat_HTMLParseError:
2049 pass
8bb56eee 2050 return parser.attrs
9e6dd238 2051
c5229f39 2052
2053def parse_list(webpage):
2054 """Given a string for an series of HTML <li> elements,
2055 return a dictionary of their attributes"""
2056 parser = HTMLListAttrsParser()
2057 parser.feed(webpage)
2058 parser.close()
2059 return parser.items
2060
2061
9e6dd238 2062def clean_html(html):
59ae15a5 2063 """Clean an HTML snippet into a readable string"""
2064
2065 if html is None: # Convenience for sanitizing descriptions etc.
2066 return html
2067
2068 # Newline vs <br />
2069 html = html.replace('\n', ' ')
2070 html = re.sub(r'(?u)\s*<\s*br\s*/?\s*>\s*', '\n', html)
2071 html = re.sub(r'(?u)<\s*/\s*p\s*>\s*<\s*p[^>]*>', '\n', html)
2072 # Strip html tags
2073 html = re.sub('<.*?>', '', html)
2074 # Replace html entities
2075 html = unescapeHTML(html)
7decf895 2076 return html.strip()
2077
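# --- Illustrative sketch (not part of the original module) ---------------------
# clean_html() flattens an HTML snippet to readable text: <br> and </p><p> become
# newlines, remaining tags are stripped and entities are decoded.
def _demo_clean_html():
    snippet = '<p>First line<br/>Second &amp; last</p>'
    return clean_html(snippet)  # roughly 'First line\nSecond & last'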
2078
d77c3dfd 2079def sanitize_open(filename, open_mode):
2080 """Try to open the given filename, and slightly tweak it if this fails.
2081
2082 Attempts to open the given filename. If this fails, it tries to change
2083 the filename slightly, step by step, until it's either able to open it
2084 or it fails and raises a final exception, like the standard open()
2085 function.
2086
2087 It returns the tuple (stream, definitive_file_name).
2088 """
2089 try:
28e614de 2090 if filename == '-':
2091 if sys.platform == 'win32':
2092 import msvcrt
2093 msvcrt.setmode(sys.stdout.fileno(), os.O_BINARY)
898280a0 2094 return (sys.stdout.buffer if hasattr(sys.stdout, 'buffer') else sys.stdout, filename)
2095 stream = open(encodeFilename(filename), open_mode)
2096 return (stream, filename)
2097 except (IOError, OSError) as err:
2098 if err.errno in (errno.EACCES,):
2099 raise
59ae15a5 2100
f45c185f 2101 # In case of error, try to remove win32 forbidden chars
d55de57b 2102 alt_filename = sanitize_path(filename)
2103 if alt_filename == filename:
2104 raise
2105 else:
2106 # An exception here should be caught in the caller
d55de57b 2107 stream = open(encodeFilename(alt_filename), open_mode)
f45c185f 2108 return (stream, alt_filename)
2109
2110
2111def timeconvert(timestr):
2112 """Convert RFC 2822 defined time string into system timestamp"""
2113 timestamp = None
2114 timetuple = email.utils.parsedate_tz(timestr)
2115 if timetuple is not None:
2116 timestamp = email.utils.mktime_tz(timetuple)
2117 return timestamp
1c469a94 2118
5f6a1245 2119
796173d0 2120def sanitize_filename(s, restricted=False, is_id=False):
2121 """Sanitizes a string so it could be used as part of a filename.
2122 If restricted is set, use a stricter subset of allowed characters.
2123 Set is_id if this is not an arbitrary string, but an ID that should be kept
2124 if possible.
2125 """
2126 def replace_insane(char):
2127 if restricted and char in ACCENT_CHARS:
2128 return ACCENT_CHARS[char]
91dd88b9 2129 elif not restricted and char == '\n':
2130 return ' '
2131 elif char == '?' or ord(char) < 32 or ord(char) == 127:
2132 return ''
2133 elif char == '"':
2134 return '' if restricted else '\''
2135 elif char == ':':
2136 return '_-' if restricted else ' -'
2137 elif char in '\\/|*<>':
2138 return '_'
627dcfff 2139 if restricted and (char in '!&\'()[]{}$;`^,#' or char.isspace()):
2140 return '_'
2141 if restricted and ord(char) > 127:
2142 return '_'
2143 return char
2144
639f1cea 2145 if s == '':
2146 return ''
2147 # Handle timestamps
2148 s = re.sub(r'[0-9]+(?::[0-9]+)+', lambda m: m.group(0).replace(':', '_'), s)
28e614de 2149 result = ''.join(map(replace_insane, s))
2150 if not is_id:
2151 while '__' in result:
2152 result = result.replace('__', '_')
2153 result = result.strip('_')
2154 # Common case of "Foreign band name - English song title"
2155 if restricted and result.startswith('-_'):
2156 result = result[2:]
2157 if result.startswith('-'):
2158 result = '_' + result[len('-'):]
a7440261 2159 result = result.lstrip('.')
2160 if not result:
2161 result = '_'
59ae15a5 2162 return result
d77c3dfd 2163
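# --- Illustrative sketch (not part of the original module) ---------------------
# Indicative behaviour of sanitize_filename(): the default mode keeps the string
# readable (':' -> ' -', '"' -> "'"), while restricted mode transliterates accents
# and replaces anything unsafe with '_'.
def _demo_sanitize_filename():
    readable = sanitize_filename('AC/DC: Live "1991"')                   # "AC_DC - Live '1991'"
    restricted = sanitize_filename('Beyoncé: Déjà Vu', restricted=True)  # 'Beyonce_-_Deja_Vu'
    return readable, restricted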
5f6a1245 2164
c2934512 2165def sanitize_path(s, force=False):
a2aaf4db 2166 """Sanitizes and normalizes path on Windows"""
c2934512 2167 if sys.platform == 'win32':
c4218ac3 2168 force = False
c2934512 2169 drive_or_unc, _ = os.path.splitdrive(s)
2170 if sys.version_info < (2, 7) and not drive_or_unc:
2171 drive_or_unc, _ = os.path.splitunc(s)
2172 elif force:
2173 drive_or_unc = ''
2174 else:
a2aaf4db 2175 return s
c2934512 2176
2177 norm_path = os.path.normpath(remove_start(s, drive_or_unc)).split(os.path.sep)
2178 if drive_or_unc:
2179 norm_path.pop(0)
2180 sanitized_path = [
ec85ded8 2181 path_part if path_part in ['.', '..'] else re.sub(r'(?:[/<>:"\|\\?\*]|[\s.]$)', '#', path_part)
a2aaf4db 2182 for path_part in norm_path]
2183 if drive_or_unc:
2184 sanitized_path.insert(0, drive_or_unc + os.path.sep)
c4218ac3 2185 elif force and s[0] == os.path.sep:
2186 sanitized_path.insert(0, os.path.sep)
2187 return os.path.join(*sanitized_path)
2188
2189
17bcc626 2190def sanitize_url(url):
2191 # Prepend the `http:` scheme to protocol-less URLs in order to reduce
2192 # the number of unwanted failures due to a missing protocol
2193 if url.startswith('//'):
2194 return 'http:%s' % url
2195 # Fix some common typos seen so far
2196 COMMON_TYPOS = (
067aa17e 2197 # https://github.com/ytdl-org/youtube-dl/issues/15649
2198 (r'^httpss://', r'https://'),
2199 # https://bx1.be/lives/direct-tv/
2200 (r'^rmtp([es]?)://', r'rtmp\1://'),
2201 )
2202 for mistake, fixup in COMMON_TYPOS:
2203 if re.match(mistake, url):
2204 return re.sub(mistake, fixup, url)
bc6b9bcd 2205 return url
2206
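# --- Illustrative sketch (not part of the original module) ---------------------
# sanitize_url() upgrades protocol-relative URLs and repairs the known scheme typos
# listed above (expected results shown as comments; URLs are made up).
def _demo_sanitize_url():
    a = sanitize_url('//example.com/video')      # 'http://example.com/video'
    b = sanitize_url('httpss://example.com/x')   # 'https://example.com/x'
    c = sanitize_url('rmtp://example.com/live')  # 'rtmp://example.com/live'
    return a, b, c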
2207
2208def extract_basic_auth(url):
2209 parts = compat_urlparse.urlsplit(url)
2210 if parts.username is None:
2211 return url, None
2212 url = compat_urlparse.urlunsplit(parts._replace(netloc=(
2213 parts.hostname if parts.port is None
2214 else '%s:%d' % (parts.hostname, parts.port))))
2215 auth_payload = base64.b64encode(
2216 ('%s:%s' % (parts.username, parts.password or '')).encode('utf-8'))
2217 return url, 'Basic ' + auth_payload.decode('utf-8')
2218
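# --- Illustrative sketch (not part of the original module) ---------------------
# extract_basic_auth() strips userinfo out of the URL and hands back a ready-made
# value for the Authorization header (credentials below are made up).
def _demo_extract_basic_auth():
    url, auth = extract_basic_auth('https://user:pass@example.com/feed')
    # url  -> 'https://example.com/feed'
    # auth -> 'Basic ' + base64 of 'user:pass'
    return url, auth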
2219
67dda517 2220def sanitized_Request(url, *args, **kwargs):
bc6b9bcd 2221 url, auth_header = extract_basic_auth(escape_url(sanitize_url(url)))
2222 if auth_header is not None:
2223 headers = args[1] if len(args) >= 2 else kwargs.setdefault('headers', {})
2224 headers['Authorization'] = auth_header
2225 return compat_urllib_request.Request(url, *args, **kwargs)
2226
2227
2228def expand_path(s):
2229 """Expand shell variables and ~"""
2230 return os.path.expandvars(compat_expanduser(s))
2231
2232
d77c3dfd 2233def orderedSet(iterable):
2234 """ Remove all duplicates from the input iterable """
2235 res = []
2236 for el in iterable:
2237 if el not in res:
2238 res.append(el)
2239 return res
d77c3dfd 2240
912b38b4 2241
55b2f099 2242def _htmlentity_transform(entity_with_semicolon):
4e408e47 2243 """Transforms an HTML entity to a character."""
2244 entity = entity_with_semicolon[:-1]
2245
2246 # Known non-numeric HTML entity
2247 if entity in compat_html_entities.name2codepoint:
2248 return compat_chr(compat_html_entities.name2codepoint[entity])
2249
2250 # TODO: HTML5 allows entities without a semicolon. For example,
2251 # '&Eacuteric' should be decoded as 'Éric'.
2252 if entity_with_semicolon in compat_html_entities_html5:
2253 return compat_html_entities_html5[entity_with_semicolon]
2254
91757b0f 2255 mobj = re.match(r'#(x[0-9a-fA-F]+|[0-9]+)', entity)
2256 if mobj is not None:
2257 numstr = mobj.group(1)
28e614de 2258 if numstr.startswith('x'):
4e408e47 2259 base = 16
28e614de 2260 numstr = '0%s' % numstr
2261 else:
2262 base = 10
067aa17e 2263 # See https://github.com/ytdl-org/youtube-dl/issues/7518
2264 try:
2265 return compat_chr(int(numstr, base))
2266 except ValueError:
2267 pass
2268
2269 # Unknown entity in name, return its literal representation
7a3f0c00 2270 return '&%s;' % entity
2271
2272
d77c3dfd 2273def unescapeHTML(s):
2274 if s is None:
2275 return None
2276 assert type(s) == compat_str
d77c3dfd 2277
4e408e47 2278 return re.sub(
95f3f7c2 2279 r'&([^&;]+;)', lambda m: _htmlentity_transform(m.group(1)), s)
d77c3dfd 2280
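# --- Illustrative sketch (not part of the original module) ---------------------
# unescapeHTML() handles named, HTML5 and numeric (decimal/hex) entities via
# _htmlentity_transform() above.
def _demo_unescapeHTML():
    return (
        unescapeHTML('&amp;'),   # '&'  (named entity)
        unescapeHTML('&#39;'),   # "'"  (decimal)
        unescapeHTML('&#x27;'),  # "'"  (hexadecimal)
    )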
8bf48f23 2281
cdb19aa4 2282def escapeHTML(text):
2283 return (
2284 text
2285 .replace('&', '&amp;')
2286 .replace('<', '&lt;')
2287 .replace('>', '&gt;')
2288 .replace('"', '&quot;')
2289 .replace("'", '&#39;')
2290 )
2291
2292
f5b1bca9 2293def process_communicate_or_kill(p, *args, **kwargs):
2294 try:
2295 return p.communicate(*args, **kwargs)
2296 except BaseException: # Including KeyboardInterrupt
2297 p.kill()
2298 p.wait()
2299 raise
2300
2301
d3c93ec2 2302class Popen(subprocess.Popen):
2303 if sys.platform == 'win32':
2304 _startupinfo = subprocess.STARTUPINFO()
2305 _startupinfo.dwFlags |= subprocess.STARTF_USESHOWWINDOW
2306 else:
2307 _startupinfo = None
2308
2309 def __init__(self, *args, **kwargs):
2310 super(Popen, self).__init__(*args, **kwargs, startupinfo=self._startupinfo)
2311
2312 def communicate_or_kill(self, *args, **kwargs):
2313 return process_communicate_or_kill(self, *args, **kwargs)
2314
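# --- Illustrative usage sketch (not part of the original module) ----------------
# The Popen subclass above hides the console window on Windows and pairs with
# communicate_or_kill() so an interrupted child process is cleaned up. The command
# below is only an example and may not exist on a given system.
def _demo_popen():
    p = Popen(['ffmpeg', '-version'], stdout=subprocess.PIPE, stderr=subprocess.PIPE)
    stdout, stderr = p.communicate_or_kill()
    return p.returncode, stdout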
2315
2316def get_subprocess_encoding():
2317 if sys.platform == 'win32' and sys.getwindowsversion()[0] >= 5:
2318 # For subprocess calls, encode with locale encoding
2319 # Refer to http://stackoverflow.com/a/9951851/35070
2320 encoding = preferredencoding()
2321 else:
2322 encoding = sys.getfilesystemencoding()
2323 if encoding is None:
2324 encoding = 'utf-8'
2325 return encoding
2326
2327
8bf48f23 2328def encodeFilename(s, for_subprocess=False):
59ae15a5
PH
2329 """
2330 @param s The name of the file
2331 """
d77c3dfd 2332
8bf48f23 2333 assert type(s) == compat_str
d77c3dfd 2334
59ae15a5
PH
2335 # Python 3 has a Unicode API
2336 if sys.version_info >= (3, 0):
2337 return s
0f00efed 2338
aa49acd1
S
2339 # Pass '' directly to use Unicode APIs on Windows 2000 and up
2340 # (Detecting Windows NT 4 is tricky because 'major >= 4' would
2341 # match Windows 9x series as well. Besides, NT 4 is obsolete.)
2342 if not for_subprocess and sys.platform == 'win32' and sys.getwindowsversion()[0] >= 5:
2343 return s
2344
8ee239e9
YCH
2345 # Jython assumes filenames are Unicode strings though reported as Python 2.x compatible
2346 if sys.platform.startswith('java'):
2347 return s
2348
aa49acd1
S
2349 return s.encode(get_subprocess_encoding(), 'ignore')
2350
2351
2352def decodeFilename(b, for_subprocess=False):
2353
2354 if sys.version_info >= (3, 0):
2355 return b
2356
2357 if not isinstance(b, bytes):
2358 return b
2359
2360 return b.decode(get_subprocess_encoding(), 'ignore')
8bf48f23 2361
f07b74fc
PH
2362
2363def encodeArgument(s):
2364 if not isinstance(s, compat_str):
2365 # Legacy code that uses byte strings
2366 # Uncomment the following line after fixing all post processors
7af808a5 2367 # assert False, 'Internal error: %r should be of type %r, is %r' % (s, compat_str, type(s))
f07b74fc
PH
2368 s = s.decode('ascii')
2369 return encodeFilename(s, True)
2370
2371
aa49acd1
S
2372def decodeArgument(b):
2373 return decodeFilename(b, True)
2374
2375
8271226a
PH
2376def decodeOption(optval):
2377 if optval is None:
2378 return optval
2379 if isinstance(optval, bytes):
2380 optval = optval.decode(preferredencoding())
2381
2382 assert isinstance(optval, compat_str)
2383 return optval
1c256f70 2384
5f6a1245 2385
aa7785f8 2386_timetuple = collections.namedtuple('Time', ('hours', 'minutes', 'seconds', 'milliseconds'))
2387
2388
2389def timetuple_from_msec(msec):
2390 secs, msec = divmod(msec, 1000)
2391 mins, secs = divmod(secs, 60)
2392 hrs, mins = divmod(mins, 60)
2393 return _timetuple(hrs, mins, secs, msec)
2394
2395
cdb19aa4 2396def formatSeconds(secs, delim=':', msec=False):
aa7785f8 2397 time = timetuple_from_msec(secs * 1000)
2398 if time.hours:
2399 ret = '%d%s%02d%s%02d' % (time.hours, delim, time.minutes, delim, time.seconds)
2400 elif time.minutes:
2401 ret = '%d%s%02d' % (time.minutes, delim, time.seconds)
4539dd30 2402 else:
aa7785f8 2403 ret = '%d' % time.seconds
2404 return '%s.%03d' % (ret, time.milliseconds) if msec else ret
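# Illustrative usage:
#   timetuple_from_msec(91500) == Time(hours=0, minutes=1, seconds=31, milliseconds=500)
#   formatSeconds(3661) == '1:01:01'
#   formatSeconds(90.5, msec=True) == '1:30.500'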
4539dd30 2405
a0ddb8a2 2406
77562778 2407def _ssl_load_windows_store_certs(ssl_context, storename):
2408 # Code adapted from _load_windows_store_certs in https://github.com/python/cpython/blob/main/Lib/ssl.py
2409 try:
2410 certs = [cert for cert, encoding, trust in ssl.enum_certificates(storename)
2411 if encoding == 'x509_asn' and (
2412 trust is True or ssl.Purpose.SERVER_AUTH.oid in trust)]
2413 except PermissionError:
2414 return
2415 for cert in certs:
a2366922 2416 try:
77562778 2417 ssl_context.load_verify_locations(cadata=cert)
2418 except ssl.SSLError:
a2366922
PH
2419 pass
2420
77562778 2421
2422def make_HTTPS_handler(params, **kwargs):
2423 opts_check_certificate = not params.get('nocheckcertificate')
2424 context = ssl.SSLContext(ssl.PROTOCOL_TLS_CLIENT)
2425 context.check_hostname = opts_check_certificate
2426 context.verify_mode = ssl.CERT_REQUIRED if opts_check_certificate else ssl.CERT_NONE
2427 if opts_check_certificate:
4e3d1898 2428 try:
2429 context.load_default_certs()
2430 # Work around the issue in load_default_certs when there are bad certificates. See:
2431 # https://github.com/yt-dlp/yt-dlp/issues/1060,
2432 # https://bugs.python.org/issue35665, https://bugs.python.org/issue45312
2433 except ssl.SSLError:
2434 # enum_certificates is not present in mingw python. See https://github.com/yt-dlp/yt-dlp/issues/1151
2435 if sys.platform == 'win32' and hasattr(ssl, 'enum_certificates'):
2436 # Create a new context to discard any certificates that were already loaded
2437 context = ssl.SSLContext(ssl.PROTOCOL_TLS_CLIENT)
2438 context.check_hostname, context.verify_mode = True, ssl.CERT_REQUIRED
2439 for storename in ('CA', 'ROOT'):
2440 _ssl_load_windows_store_certs(context, storename)
2441 context.set_default_verify_paths()
77562778 2442 return YoutubeDLHTTPSHandler(params, context=context, **kwargs)
ea6d901e 2443
732ea2f0 2444
5873d4cc 2445def bug_reports_message(before=';'):
08f2a92c 2446 if ytdl_is_updateable():
7a5c1cfe 2447 update_cmd = 'type yt-dlp -U to update'
08f2a92c 2448 else:
7a5c1cfe 2449 update_cmd = 'see https://github.com/yt-dlp/yt-dlp on how to update'
5873d4cc 2450 msg = 'please report this issue on https://github.com/yt-dlp/yt-dlp .'
08f2a92c 2451 msg += ' Make sure you are using the latest version; %s.' % update_cmd
7a5c1cfe 2452 msg += ' Be sure to call yt-dlp with the --verbose flag and include its complete output.'
5873d4cc
F
2453
2454 before = before.rstrip()
2455 if not before or before.endswith(('.', '!', '?')):
2456 msg = msg[0].title() + msg[1:]
2457
2458 return (before + ' ' if before else '') + msg
08f2a92c
JMF
2459
2460
bf5b9d85
PM
2461class YoutubeDLError(Exception):
2462 """Base exception for YoutubeDL errors."""
aa9369a2 2463 msg = None
2464
2465 def __init__(self, msg=None):
2466 if msg is not None:
2467 self.msg = msg
2468 elif self.msg is None:
2469 self.msg = type(self).__name__
2470 super().__init__(self.msg)
bf5b9d85
PM
2471
2472
3158150c 2473network_exceptions = [compat_urllib_error.URLError, compat_http_client.HTTPException, socket.error]
2474if hasattr(ssl, 'CertificateError'):
2475 network_exceptions.append(ssl.CertificateError)
2476network_exceptions = tuple(network_exceptions)
2477
2478
bf5b9d85 2479class ExtractorError(YoutubeDLError):
1c256f70 2480 """Error during info extraction."""
5f6a1245 2481
1151c407 2482 def __init__(self, msg, tb=None, expected=False, cause=None, video_id=None, ie=None):
9a82b238 2483 """ tb, if given, is the original traceback (so that it can be printed out).
7a5c1cfe 2484 If expected is set, this is a normal error message and most likely not a bug in yt-dlp.
9a82b238 2485 """
3158150c 2486 if sys.exc_info()[0] in network_exceptions:
9a82b238 2487 expected = True
d5979c5d 2488
526d74ec 2489 self.msg = str(msg)
1c256f70 2490 self.traceback = tb
1151c407 2491 self.expected = expected
2eabb802 2492 self.cause = cause
d11271dd 2493 self.video_id = video_id
1151c407 2494 self.ie = ie
2495 self.exc_info = sys.exc_info() # preserve original exception
2496
2497 super(ExtractorError, self).__init__(''.join((
2498 format_field(ie, template='[%s] '),
2499 format_field(video_id, template='%s: '),
526d74ec 2500 self.msg,
1151c407 2501 format_field(cause, template=' (caused by %r)'),
2502 '' if expected else bug_reports_message())))
1c256f70 2503
01951dda
PH
2504 def format_traceback(self):
2505 if self.traceback is None:
2506 return None
28e614de 2507 return ''.join(traceback.format_tb(self.traceback))
01951dda 2508
1c256f70 2509
416c7fcb
PH
2510class UnsupportedError(ExtractorError):
2511 def __init__(self, url):
2512 super(UnsupportedError, self).__init__(
2513 'Unsupported URL: %s' % url, expected=True)
2514 self.url = url
2515
2516
55b3e45b
JMF
2517class RegexNotFoundError(ExtractorError):
2518 """Error when a regex didn't match"""
2519 pass
2520
2521
773f291d
S
2522class GeoRestrictedError(ExtractorError):
2523 """Geographic restriction Error exception.
2524
2525 This exception may be thrown when a video is not available from your
2526 geographic location due to geographic restrictions imposed by a website.
2527 """
b6e0c7d2 2528
0db3bae8 2529 def __init__(self, msg, countries=None, **kwargs):
2530 kwargs['expected'] = True
2531 super(GeoRestrictedError, self).__init__(msg, **kwargs)
773f291d
S
2532 self.countries = countries
2533
2534
bf5b9d85 2535class DownloadError(YoutubeDLError):
59ae15a5 2536 """Download Error exception.
d77c3dfd 2537
59ae15a5
PH
2538 This exception may be thrown by FileDownloader objects if they are not
2539 configured to continue on errors. They will contain the appropriate
2540 error message.
2541 """
5f6a1245 2542
8cc83b8d
FV
2543 def __init__(self, msg, exc_info=None):
2544 """ exc_info, if given, is the original exception that caused the trouble (as returned by sys.exc_info()). """
2545 super(DownloadError, self).__init__(msg)
2546 self.exc_info = exc_info
d77c3dfd
FV
2547
2548
498f5606 2549class EntryNotInPlaylist(YoutubeDLError):
2550 """Entry not in playlist exception.
2551
2552 This exception will be thrown by YoutubeDL when a requested entry
2553 is not found in the playlist info_dict
2554 """
aa9369a2 2555 msg = 'Entry not found in info'
498f5606 2556
2557
bf5b9d85 2558class SameFileError(YoutubeDLError):
59ae15a5 2559 """Same File exception.
d77c3dfd 2560
59ae15a5
PH
2561 This exception will be thrown by FileDownloader objects if they detect
2562 multiple files would have to be downloaded to the same file on disk.
2563 """
aa9369a2 2564 msg = 'Fixed output name but more than one file to download'
2565
2566 def __init__(self, filename=None):
2567 if filename is not None:
2568 self.msg += f': {filename}'
2569 super().__init__(self.msg)
d77c3dfd
FV
2570
2571
bf5b9d85 2572class PostProcessingError(YoutubeDLError):
59ae15a5 2573 """Post Processing exception.
d77c3dfd 2574
59ae15a5
PH
2575 This exception may be raised by PostProcessor's .run() method to
2576 indicate an error in the postprocessing task.
2577 """
5f6a1245 2578
5f6a1245 2579
48f79687 2580class DownloadCancelled(YoutubeDLError):
2581 """ Exception raised when the download queue should be interrupted """
2582 msg = 'The download was cancelled'
8b0d7497 2583
8b0d7497 2584
48f79687 2585class ExistingVideoReached(DownloadCancelled):
2586 """ --break-on-existing triggered """
2587 msg = 'Encountered a video that is already in the archive, stopping due to --break-on-existing'
8b0d7497 2588
48f79687 2589
2590class RejectedVideoReached(DownloadCancelled):
2591 """ --break-on-reject triggered """
2592 msg = 'Encountered a video that did not match filter, stopping due to --break-on-reject'
51d9739f 2593
2594
48f79687 2595class MaxDownloadsReached(DownloadCancelled):
59ae15a5 2596 """ --max-downloads limit has been reached. """
48f79687 2597 msg = 'Maximum number of downloads reached, stopping due to --max-downloads'
2598
2599
f2ebc5c7 2600class ReExtractInfo(YoutubeDLError):
2601 """ Video info needs to be re-extracted. """
2602
2603 def __init__(self, msg, expected=False):
2604 super().__init__(msg)
2605 self.expected = expected
2606
2607
2608class ThrottledDownload(ReExtractInfo):
48f79687 2609 """ Download speed below --throttled-rate. """
aa9369a2 2610 msg = 'The download speed is below throttle limit'
d77c3dfd 2611
43b22906 2612 def __init__(self):
2613 super().__init__(self.msg, expected=False)
f2ebc5c7 2614
d77c3dfd 2615
bf5b9d85 2616class UnavailableVideoError(YoutubeDLError):
59ae15a5 2617 """Unavailable Format exception.
d77c3dfd 2618
59ae15a5
PH
2619 This exception will be thrown when a video is requested
2620 in a format that is not available for that video.
2621 """
aa9369a2 2622 msg = 'Unable to download video'
2623
2624 def __init__(self, err=None):
2625 if err is not None:
2626 self.msg += f': {err}'
2627 super().__init__(self.msg)
d77c3dfd
FV
2628
2629
bf5b9d85 2630class ContentTooShortError(YoutubeDLError):
59ae15a5 2631 """Content Too Short exception.
d77c3dfd 2632
59ae15a5
PH
2633 This exception may be raised by FileDownloader objects when a file they
2634 download is too small for what the server announced first, indicating
2635 the connection was probably interrupted.
2636 """
d77c3dfd 2637
59ae15a5 2638 def __init__(self, downloaded, expected):
bf5b9d85
PM
2639 super(ContentTooShortError, self).__init__(
2640 'Downloaded {0} bytes, expected {1} bytes'.format(downloaded, expected)
2641 )
2c7ed247 2642 # Both in bytes
59ae15a5
PH
2643 self.downloaded = downloaded
2644 self.expected = expected
d77c3dfd 2645
5f6a1245 2646
bf5b9d85 2647class XAttrMetadataError(YoutubeDLError):
efa97bdc
YCH
2648 def __init__(self, code=None, msg='Unknown error'):
2649 super(XAttrMetadataError, self).__init__(msg)
2650 self.code = code
bd264412 2651 self.msg = msg
efa97bdc
YCH
2652
2653 # Parsing code and msg
3089bc74 2654 if (self.code in (errno.ENOSPC, errno.EDQUOT)
a0566bbf 2655 or 'No space left' in self.msg or 'Disk quota exceeded' in self.msg):
efa97bdc
YCH
2656 self.reason = 'NO_SPACE'
2657 elif self.code == errno.E2BIG or 'Argument list too long' in self.msg:
2658 self.reason = 'VALUE_TOO_LONG'
2659 else:
2660 self.reason = 'NOT_SUPPORTED'
2661
2662
bf5b9d85 2663class XAttrUnavailableError(YoutubeDLError):
efa97bdc
YCH
2664 pass
2665
2666
c5a59d93 2667def _create_http_connection(ydl_handler, http_class, is_https, *args, **kwargs):
e5e78797
S
2668 # Working around python 2 bug (see http://bugs.python.org/issue17849) by limiting
2669 # expected HTTP responses to meet HTTP/1.0 or later (see also
067aa17e 2670 # https://github.com/ytdl-org/youtube-dl/issues/6727)
e5e78797 2671 if sys.version_info < (3, 0):
65220c3b
S
2672 kwargs['strict'] = True
2673 hc = http_class(*args, **compat_kwargs(kwargs))
be4a824d 2674 source_address = ydl_handler._params.get('source_address')
8959018a 2675
be4a824d 2676 if source_address is not None:
8959018a
AU
2677 # This is to work around _create_connection() from socket where it will try all
2678 # address data from getaddrinfo() including IPv6. This filters the result from
2679 # getaddrinfo() based on the source_address value.
2680 # This is based on the cpython socket.create_connection() function.
2681 # https://github.com/python/cpython/blob/master/Lib/socket.py#L691
2682 def _create_connection(address, timeout=socket._GLOBAL_DEFAULT_TIMEOUT, source_address=None):
2683 host, port = address
2684 err = None
2685 addrs = socket.getaddrinfo(host, port, 0, socket.SOCK_STREAM)
9e21e6d9
S
2686 af = socket.AF_INET if '.' in source_address[0] else socket.AF_INET6
2687 ip_addrs = [addr for addr in addrs if addr[0] == af]
2688 if addrs and not ip_addrs:
2689 ip_version = 'v4' if af == socket.AF_INET else 'v6'
2690 raise socket.error(
2691 "No remote IP%s addresses available for connect, can't use '%s' as source address"
2692 % (ip_version, source_address[0]))
8959018a
AU
2693 for res in ip_addrs:
2694 af, socktype, proto, canonname, sa = res
2695 sock = None
2696 try:
2697 sock = socket.socket(af, socktype, proto)
2698 if timeout is not socket._GLOBAL_DEFAULT_TIMEOUT:
2699 sock.settimeout(timeout)
2700 sock.bind(source_address)
2701 sock.connect(sa)
2702 err = None # Explicitly break reference cycle
2703 return sock
2704 except socket.error as _:
2705 err = _
2706 if sock is not None:
2707 sock.close()
2708 if err is not None:
2709 raise err
2710 else:
9e21e6d9
S
2711 raise socket.error('getaddrinfo returns an empty list')
2712 if hasattr(hc, '_create_connection'):
2713 hc._create_connection = _create_connection
be4a824d
PH
2714 sa = (source_address, 0)
2715 if hasattr(hc, 'source_address'): # Python 2.7+
2716 hc.source_address = sa
2717 else: # Python 2.6
2718 def _hc_connect(self, *args, **kwargs):
9e21e6d9 2719 sock = _create_connection(
be4a824d
PH
2720 (self.host, self.port), self.timeout, sa)
2721 if is_https:
d7932313
PH
2722 self.sock = ssl.wrap_socket(
2723 sock, self.key_file, self.cert_file,
2724 ssl_version=ssl.PROTOCOL_TLSv1)
be4a824d
PH
2725 else:
2726 self.sock = sock
2727 hc.connect = functools.partial(_hc_connect, hc)
2728
2729 return hc
2730
2731
87f0e62d 2732def handle_youtubedl_headers(headers):
992fc9d6
YCH
2733 filtered_headers = headers
2734
2735 if 'Youtubedl-no-compression' in filtered_headers:
2736 filtered_headers = dict((k, v) for k, v in filtered_headers.items() if k.lower() != 'accept-encoding')
87f0e62d 2737 del filtered_headers['Youtubedl-no-compression']
87f0e62d 2738
992fc9d6 2739 return filtered_headers
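# Illustrative usage (hypothetical header dict): the marker header and any
# Accept-Encoding header are stripped before the real request is made,
#   handle_youtubedl_headers({'User-Agent': 'UA', 'Accept-Encoding': 'gzip',
#                             'Youtubedl-no-compression': '1'}) == {'User-Agent': 'UA'}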
87f0e62d
YCH
2740
2741
acebc9cd 2742class YoutubeDLHandler(compat_urllib_request.HTTPHandler):
59ae15a5
PH
2743 """Handler for HTTP requests and responses.
2744
2745 This class, when installed with an OpenerDirector, automatically adds
2746 the standard headers to every HTTP request and handles gzipped and
2747 deflated responses from web servers. If compression is to be avoided in
2748 a particular request, the original request in the program code only has
0424ec30 2749 to include the HTTP header "Youtubedl-no-compression", which will be
59ae15a5
PH
2750 removed before making the real request.
2751
2752 Part of this code was copied from:
2753
2754 http://techknack.net/python-urllib2-handlers/
2755
2756 Andrew Rowls, the author of that code, agreed to release it to the
2757 public domain.
2758 """
2759
be4a824d
PH
2760 def __init__(self, params, *args, **kwargs):
2761 compat_urllib_request.HTTPHandler.__init__(self, *args, **kwargs)
2762 self._params = params
2763
2764 def http_open(self, req):
71aff188
YCH
2765 conn_class = compat_http_client.HTTPConnection
2766
2767 socks_proxy = req.headers.get('Ytdl-socks-proxy')
2768 if socks_proxy:
2769 conn_class = make_socks_conn_class(conn_class, socks_proxy)
2770 del req.headers['Ytdl-socks-proxy']
2771
be4a824d 2772 return self.do_open(functools.partial(
71aff188 2773 _create_http_connection, self, conn_class, False),
be4a824d
PH
2774 req)
2775
59ae15a5
PH
2776 @staticmethod
2777 def deflate(data):
fc2119f2 2778 if not data:
2779 return data
59ae15a5
PH
2780 try:
2781 return zlib.decompress(data, -zlib.MAX_WBITS)
2782 except zlib.error:
2783 return zlib.decompress(data)
2784
acebc9cd 2785 def http_request(self, req):
51f267d9
S
2786 # According to RFC 3986, URLs cannot contain non-ASCII characters; however, this is not
2787 # always respected by websites: some tend to give out URLs with non-percent-encoded
2788 # non-ASCII characters (see telemb.py, ard.py [#3412])
2789 # urllib chokes on URLs with non-ASCII characters (see http://bugs.python.org/issue3991)
2790 # To work around aforementioned issue we will replace request's original URL with
2791 # percent-encoded one
2792 # Since redirects are also affected (e.g. http://www.southpark.de/alle-episoden/s18e09)
2793 # the code of this workaround has been moved here from YoutubeDL.urlopen()
2794 url = req.get_full_url()
2795 url_escaped = escape_url(url)
2796
2797 # Substitute URL if any change after escaping
2798 if url != url_escaped:
15d260eb 2799 req = update_Request(req, url=url_escaped)
51f267d9 2800
33ac271b 2801 for h, v in std_headers.items():
3d5f7a39
JK
2802 # Capitalize is needed because of Python bug 2275: http://bugs.python.org/issue2275
2803 # The dict keys are capitalized because of this bug by urllib
2804 if h.capitalize() not in req.headers:
33ac271b 2805 req.add_header(h, v)
87f0e62d
YCH
2806
2807 req.headers = handle_youtubedl_headers(req.headers)
989b4b2b
PH
2808
2809 if sys.version_info < (2, 7) and '#' in req.get_full_url():
2810 # Python 2.6 is brain-dead when it comes to fragments
2811 req._Request__original = req._Request__original.partition('#')[0]
2812 req._Request__r_type = req._Request__r_type.partition('#')[0]
2813
59ae15a5
PH
2814 return req
2815
acebc9cd 2816 def http_response(self, req, resp):
59ae15a5
PH
2817 old_resp = resp
2818 # gzip
2819 if resp.headers.get('Content-encoding', '') == 'gzip':
aa3e9507
PH
2820 content = resp.read()
2821 gz = gzip.GzipFile(fileobj=io.BytesIO(content), mode='rb')
2822 try:
2823 uncompressed = io.BytesIO(gz.read())
2824 except IOError as original_ioerror:
2825 # There may be junk at the end of the file
2826 # See http://stackoverflow.com/q/4928560/35070 for details
2827 for i in range(1, 1024):
2828 try:
2829 gz = gzip.GzipFile(fileobj=io.BytesIO(content[:-i]), mode='rb')
2830 uncompressed = io.BytesIO(gz.read())
2831 except IOError:
2832 continue
2833 break
2834 else:
2835 raise original_ioerror
b407d853 2836 resp = compat_urllib_request.addinfourl(uncompressed, old_resp.headers, old_resp.url, old_resp.code)
59ae15a5 2837 resp.msg = old_resp.msg
c047270c 2838 del resp.headers['Content-encoding']
59ae15a5
PH
2839 # deflate
2840 if resp.headers.get('Content-encoding', '') == 'deflate':
2841 gz = io.BytesIO(self.deflate(resp.read()))
b407d853 2842 resp = compat_urllib_request.addinfourl(gz, old_resp.headers, old_resp.url, old_resp.code)
59ae15a5 2843 resp.msg = old_resp.msg
c047270c 2844 del resp.headers['Content-encoding']
ad729172 2845 # Percent-encode redirect URL of Location HTTP header to satisfy RFC 3986 (see
067aa17e 2846 # https://github.com/ytdl-org/youtube-dl/issues/6457).
5a4d9ddb
S
2847 if 300 <= resp.code < 400:
2848 location = resp.headers.get('Location')
2849 if location:
2850 # As of RFC 2616 default charset is iso-8859-1 that is respected by python 3
2851 if sys.version_info >= (3, 0):
2852 location = location.encode('iso-8859-1').decode('utf-8')
0ea59007
YCH
2853 else:
2854 location = location.decode('utf-8')
5a4d9ddb
S
2855 location_escaped = escape_url(location)
2856 if location != location_escaped:
2857 del resp.headers['Location']
9a4aec8b
YCH
2858 if sys.version_info < (3, 0):
2859 location_escaped = location_escaped.encode('utf-8')
5a4d9ddb 2860 resp.headers['Location'] = location_escaped
59ae15a5 2861 return resp
0f8d03f8 2862
acebc9cd
PH
2863 https_request = http_request
2864 https_response = http_response
bf50b038 2865
5de90176 2866
71aff188
YCH
2867def make_socks_conn_class(base_class, socks_proxy):
2868 assert issubclass(base_class, (
2869 compat_http_client.HTTPConnection, compat_http_client.HTTPSConnection))
2870
2871 url_components = compat_urlparse.urlparse(socks_proxy)
2872 if url_components.scheme.lower() == 'socks5':
2873 socks_type = ProxyType.SOCKS5
2874 elif url_components.scheme.lower() in ('socks', 'socks4'):
2875 socks_type = ProxyType.SOCKS4
51fb4995
YCH
2876 elif url_components.scheme.lower() == 'socks4a':
2877 socks_type = ProxyType.SOCKS4A
71aff188 2878
cdd94c2e
YCH
2879 def unquote_if_non_empty(s):
2880 if not s:
2881 return s
2882 return compat_urllib_parse_unquote_plus(s)
2883
71aff188
YCH
2884 proxy_args = (
2885 socks_type,
2886 url_components.hostname, url_components.port or 1080,
2887 True, # Remote DNS
cdd94c2e
YCH
2888 unquote_if_non_empty(url_components.username),
2889 unquote_if_non_empty(url_components.password),
71aff188
YCH
2890 )
2891
2892 class SocksConnection(base_class):
2893 def connect(self):
2894 self.sock = sockssocket()
2895 self.sock.setproxy(*proxy_args)
2896 if type(self.timeout) in (int, float):
2897 self.sock.settimeout(self.timeout)
2898 self.sock.connect((self.host, self.port))
2899
2900 if isinstance(self, compat_http_client.HTTPSConnection):
2901 if hasattr(self, '_context'): # Python > 2.6
2902 self.sock = self._context.wrap_socket(
2903 self.sock, server_hostname=self.host)
2904 else:
2905 self.sock = ssl.wrap_socket(self.sock)
2906
2907 return SocksConnection
2908
2909
be4a824d
PH
2910class YoutubeDLHTTPSHandler(compat_urllib_request.HTTPSHandler):
2911 def __init__(self, params, https_conn_class=None, *args, **kwargs):
2912 compat_urllib_request.HTTPSHandler.__init__(self, *args, **kwargs)
2913 self._https_conn_class = https_conn_class or compat_http_client.HTTPSConnection
2914 self._params = params
2915
2916 def https_open(self, req):
4f264c02 2917 kwargs = {}
71aff188
YCH
2918 conn_class = self._https_conn_class
2919
4f264c02
JMF
2920 if hasattr(self, '_context'): # python > 2.6
2921 kwargs['context'] = self._context
2922 if hasattr(self, '_check_hostname'): # python 3.x
2923 kwargs['check_hostname'] = self._check_hostname
71aff188
YCH
2924
2925 socks_proxy = req.headers.get('Ytdl-socks-proxy')
2926 if socks_proxy:
2927 conn_class = make_socks_conn_class(conn_class, socks_proxy)
2928 del req.headers['Ytdl-socks-proxy']
2929
be4a824d 2930 return self.do_open(functools.partial(
71aff188 2931 _create_http_connection, self, conn_class, True),
4f264c02 2932 req, **kwargs)
be4a824d
PH
2933
2934
1bab3437 2935class YoutubeDLCookieJar(compat_cookiejar.MozillaCookieJar):
f1a8511f
S
2936 """
2937 See [1] for cookie file format.
2938
2939 1. https://curl.haxx.se/docs/http-cookies.html
2940 """
e7e62441 2941 _HTTPONLY_PREFIX = '#HttpOnly_'
c380cc28
S
2942 _ENTRY_LEN = 7
2943 _HEADER = '''# Netscape HTTP Cookie File
7a5c1cfe 2944# This file is generated by yt-dlp. Do not edit.
c380cc28
S
2945
2946'''
2947 _CookieFileEntry = collections.namedtuple(
2948 'CookieFileEntry',
2949 ('domain_name', 'include_subdomains', 'path', 'https_only', 'expires_at', 'name', 'value'))
e7e62441 2950
1bab3437 2951 def save(self, filename=None, ignore_discard=False, ignore_expires=False):
c380cc28
S
2952 """
2953 Save cookies to a file.
2954
2955 Most of the code is taken from CPython 3.8 and slightly adapted
2956 to support cookie files with UTF-8 in both python 2 and 3.
2957 """
2958 if filename is None:
2959 if self.filename is not None:
2960 filename = self.filename
2961 else:
2962 raise ValueError(compat_cookiejar.MISSING_FILENAME_TEXT)
2963
1bab3437
S
2964 # Store session cookies with `expires` set to 0 instead of an empty
2965 # string
2966 for cookie in self:
2967 if cookie.expires is None:
2968 cookie.expires = 0
c380cc28
S
2969
2970 with io.open(filename, 'w', encoding='utf-8') as f:
2971 f.write(self._HEADER)
2972 now = time.time()
2973 for cookie in self:
2974 if not ignore_discard and cookie.discard:
2975 continue
2976 if not ignore_expires and cookie.is_expired(now):
2977 continue
2978 if cookie.secure:
2979 secure = 'TRUE'
2980 else:
2981 secure = 'FALSE'
2982 if cookie.domain.startswith('.'):
2983 initial_dot = 'TRUE'
2984 else:
2985 initial_dot = 'FALSE'
2986 if cookie.expires is not None:
2987 expires = compat_str(cookie.expires)
2988 else:
2989 expires = ''
2990 if cookie.value is None:
2991 # cookies.txt regards 'Set-Cookie: foo' as a cookie
2992 # with no name, whereas http.cookiejar regards it as a
2993 # cookie with no value.
2994 name = ''
2995 value = cookie.name
2996 else:
2997 name = cookie.name
2998 value = cookie.value
2999 f.write(
3000 '\t'.join([cookie.domain, initial_dot, cookie.path,
3001 secure, expires, name, value]) + '\n')
1bab3437
S
3002
3003 def load(self, filename=None, ignore_discard=False, ignore_expires=False):
e7e62441 3004 """Load cookies from a file."""
3005 if filename is None:
3006 if self.filename is not None:
3007 filename = self.filename
3008 else:
3009 raise ValueError(compat_cookiejar.MISSING_FILENAME_TEXT)
3010
c380cc28
S
3011 def prepare_line(line):
3012 if line.startswith(self._HTTPONLY_PREFIX):
3013 line = line[len(self._HTTPONLY_PREFIX):]
3014 # comments and empty lines are fine
3015 if line.startswith('#') or not line.strip():
3016 return line
3017 cookie_list = line.split('\t')
3018 if len(cookie_list) != self._ENTRY_LEN:
3019 raise compat_cookiejar.LoadError('invalid length %d' % len(cookie_list))
3020 cookie = self._CookieFileEntry(*cookie_list)
3021 if cookie.expires_at and not cookie.expires_at.isdigit():
3022 raise compat_cookiejar.LoadError('invalid expires at %s' % cookie.expires_at)
3023 return line
3024
e7e62441 3025 cf = io.StringIO()
c380cc28 3026 with io.open(filename, encoding='utf-8') as f:
e7e62441 3027 for line in f:
c380cc28
S
3028 try:
3029 cf.write(prepare_line(line))
3030 except compat_cookiejar.LoadError as e:
3031 write_string(
3032 'WARNING: skipping cookie file entry due to %s: %r\n'
3033 % (e, line), sys.stderr)
3034 continue
e7e62441 3035 cf.seek(0)
3036 self._really_load(cf, filename, ignore_discard, ignore_expires)
1bab3437
S
3037 # Session cookies are denoted by either `expires` field set to
3038 # an empty string or 0. MozillaCookieJar only recognizes the former
3039 # (see [1]). So we need to force the latter to be recognized as session
3040 # cookies on our own.
3041 # Session cookies may be important for cookies-based authentication,
3042 # e.g. usually, when the user does not check the 'Remember me' check box while
3043 # logging in on a site, some important cookies are stored as session
3044 # cookies so that not recognizing them will result in failed login.
3045 # 1. https://bugs.python.org/issue17164
3046 for cookie in self:
3047 # Treat `expires=0` cookies as session cookies
3048 if cookie.expires == 0:
3049 cookie.expires = None
3050 cookie.discard = True
3051
3052
a6420bf5
S
3053class YoutubeDLCookieProcessor(compat_urllib_request.HTTPCookieProcessor):
3054 def __init__(self, cookiejar=None):
3055 compat_urllib_request.HTTPCookieProcessor.__init__(self, cookiejar)
3056
3057 def http_response(self, request, response):
3058 # Python 2 will choke on the next HTTP request in a row if there are non-ASCII
3059 # characters in the Set-Cookie HTTP header of the last response (see
067aa17e 3060 # https://github.com/ytdl-org/youtube-dl/issues/6769).
a6420bf5
S
3061 # In order to at least prevent crashing we will percent encode Set-Cookie
3062 # header before HTTPCookieProcessor starts processing it.
e28034c5
S
3063 # if sys.version_info < (3, 0) and response.headers:
3064 # for set_cookie_header in ('Set-Cookie', 'Set-Cookie2'):
3065 # set_cookie = response.headers.get(set_cookie_header)
3066 # if set_cookie:
3067 # set_cookie_escaped = compat_urllib_parse.quote(set_cookie, b"%/;:@&=+$,!~*'()?#[] ")
3068 # if set_cookie != set_cookie_escaped:
3069 # del response.headers[set_cookie_header]
3070 # response.headers[set_cookie_header] = set_cookie_escaped
a6420bf5
S
3071 return compat_urllib_request.HTTPCookieProcessor.http_response(self, request, response)
3072
f5fa042c 3073 https_request = compat_urllib_request.HTTPCookieProcessor.http_request
a6420bf5
S
3074 https_response = http_response
3075
3076
fca6dba8 3077class YoutubeDLRedirectHandler(compat_urllib_request.HTTPRedirectHandler):
201c1459 3078 """YoutubeDL redirect handler
3079
3080 The code is based on HTTPRedirectHandler implementation from CPython [1].
3081
3082 This redirect handler solves two issues:
3083 - ensures redirect URL is always unicode under python 2
3084 - introduces support for experimental HTTP response status code
3085 308 Permanent Redirect [2] used by some sites [3]
3086
3087 1. https://github.com/python/cpython/blob/master/Lib/urllib/request.py
3088 2. https://developer.mozilla.org/en-US/docs/Web/HTTP/Status/308
3089 3. https://github.com/ytdl-org/youtube-dl/issues/28768
3090 """
3091
3092 http_error_301 = http_error_303 = http_error_307 = http_error_308 = compat_urllib_request.HTTPRedirectHandler.http_error_302
3093
3094 def redirect_request(self, req, fp, code, msg, headers, newurl):
3095 """Return a Request or None in response to a redirect.
3096
3097 This is called by the http_error_30x methods when a
3098 redirection response is received. If a redirection should
3099 take place, return a new Request to allow http_error_30x to
3100 perform the redirect. Otherwise, raise HTTPError if no-one
3101 else should try to handle this url. Return None if you can't
3102 but another Handler might.
3103 """
3104 m = req.get_method()
3105 if (not (code in (301, 302, 303, 307, 308) and m in ("GET", "HEAD")
3106 or code in (301, 302, 303) and m == "POST")):
3107 raise compat_HTTPError(req.full_url, code, msg, headers, fp)
3108 # Strictly (according to RFC 2616), 301 or 302 in response to
3109 # a POST MUST NOT cause a redirection without confirmation
3110 # from the user (of urllib.request, in this case). In practice,
3111 # essentially all clients do redirect in this case, so we do
3112 # the same.
3113
3114 # On python 2 urlh.geturl() may sometimes return the redirect URL
3115 # as a byte string instead of unicode. This workaround forces
3116 # it to always return unicode.
3117 if sys.version_info[0] < 3:
3118 newurl = compat_str(newurl)
3119
3120 # Be conciliant with URIs containing a space. This is mainly
3121 # redundant with the more complete encoding done in http_error_302(),
3122 # but it is kept for compatibility with other callers.
3123 newurl = newurl.replace(' ', '%20')
3124
3125 CONTENT_HEADERS = ("content-length", "content-type")
3126 # NB: don't use dict comprehension for python 2.6 compatibility
3127 newheaders = dict((k, v) for k, v in req.headers.items()
3128 if k.lower() not in CONTENT_HEADERS)
3129 return compat_urllib_request.Request(
3130 newurl, headers=newheaders, origin_req_host=req.origin_req_host,
3131 unverifiable=True)
fca6dba8
S
3132
3133
46f59e89
S
3134def extract_timezone(date_str):
3135 m = re.search(
f137e4c2 3136 r'''(?x)
3137 ^.{8,}? # >=8 char non-TZ prefix, if present
3138 (?P<tz>Z| # just the UTC Z, or
3139 (?:(?<=.\b\d{4}|\b\d{2}:\d\d)| # preceded by 4 digits or hh:mm or
3140 (?<!.\b[a-zA-Z]{3}|[a-zA-Z]{4}|..\b\d\d)) # not preceded by 3 alpha word or >= 4 alpha or 2 digits
3141 [ ]? # optional space
3142 (?P<sign>\+|-) # +/-
3143 (?P<hours>[0-9]{2}):?(?P<minutes>[0-9]{2}) # hh[:]mm
3144 $)
3145 ''', date_str)
46f59e89
S
3146 if not m:
3147 timezone = datetime.timedelta()
3148 else:
3149 date_str = date_str[:-len(m.group('tz'))]
3150 if not m.group('sign'):
3151 timezone = datetime.timedelta()
3152 else:
3153 sign = 1 if m.group('sign') == '+' else -1
3154 timezone = datetime.timedelta(
3155 hours=sign * int(m.group('hours')),
3156 minutes=sign * int(m.group('minutes')))
3157 return timezone, date_str
3158
3159
08b38d54 3160def parse_iso8601(date_str, delimiter='T', timezone=None):
912b38b4
PH
3161 """ Return a UNIX timestamp from the given date """
3162
3163 if date_str is None:
3164 return None
3165
52c3a6e4
S
3166 date_str = re.sub(r'\.[0-9]+', '', date_str)
3167
08b38d54 3168 if timezone is None:
46f59e89
S
3169 timezone, date_str = extract_timezone(date_str)
3170
52c3a6e4
S
3171 try:
3172 date_format = '%Y-%m-%d{0}%H:%M:%S'.format(delimiter)
3173 dt = datetime.datetime.strptime(date_str, date_format) - timezone
3174 return calendar.timegm(dt.timetuple())
3175 except ValueError:
3176 pass
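# Illustrative usage: the trailing Z or numeric offset is taken into account,
#   parse_iso8601('1970-01-01T01:00:00Z') == 3600
#   parse_iso8601('1970-01-01T02:00:00+01:00') == 3600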
912b38b4
PH
3177
3178
46f59e89
S
3179def date_formats(day_first=True):
3180 return DATE_FORMATS_DAY_FIRST if day_first else DATE_FORMATS_MONTH_FIRST
3181
3182
42bdd9d0 3183def unified_strdate(date_str, day_first=True):
bf50b038 3184 """Return a string with the date in the format YYYYMMDD"""
64e7ad60
PH
3185
3186 if date_str is None:
3187 return None
bf50b038 3188 upload_date = None
5f6a1245 3189 # Replace commas
026fcc04 3190 date_str = date_str.replace(',', ' ')
42bdd9d0 3191 # Remove AM/PM + timezone
9bb8e0a3 3192 date_str = re.sub(r'(?i)\s*(?:AM|PM)(?:\s+[A-Z]+)?', '', date_str)
46f59e89 3193 _, date_str = extract_timezone(date_str)
42bdd9d0 3194
46f59e89 3195 for expression in date_formats(day_first):
bf50b038
JMF
3196 try:
3197 upload_date = datetime.datetime.strptime(date_str, expression).strftime('%Y%m%d')
5de90176 3198 except ValueError:
bf50b038 3199 pass
42393ce2
PH
3200 if upload_date is None:
3201 timetuple = email.utils.parsedate_tz(date_str)
3202 if timetuple:
c6b9cf05
S
3203 try:
3204 upload_date = datetime.datetime(*timetuple[:6]).strftime('%Y%m%d')
3205 except ValueError:
3206 pass
6a750402
JMF
3207 if upload_date is not None:
3208 return compat_str(upload_date)
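# Illustrative usage (assuming the usual DATE_FORMATS tables defined earlier in this module):
#   unified_strdate('25/12/2021') == '20211225'        # day-first by default
#   unified_strdate('December 25, 2021') == '20211225'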
bf50b038 3209
5f6a1245 3210
46f59e89
S
3211def unified_timestamp(date_str, day_first=True):
3212 if date_str is None:
3213 return None
3214
2ae2ffda 3215 date_str = re.sub(r'[,|]', '', date_str)
46f59e89 3216
7dc2a74e 3217 pm_delta = 12 if re.search(r'(?i)PM', date_str) else 0
46f59e89
S
3218 timezone, date_str = extract_timezone(date_str)
3219
3220 # Remove AM/PM + timezone
3221 date_str = re.sub(r'(?i)\s*(?:AM|PM)(?:\s+[A-Z]+)?', '', date_str)
3222
deef3195
S
3223 # Remove unrecognized timezones from ISO 8601 alike timestamps
3224 m = re.search(r'\d{1,2}:\d{1,2}(?:\.\d+)?(?P<tz>\s*[A-Z]+)$', date_str)
3225 if m:
3226 date_str = date_str[:-len(m.group('tz'))]
3227
f226880c
PH
3228 # Python only supports microseconds, so remove nanoseconds
3229 m = re.search(r'^([0-9]{4,}-[0-9]{1,2}-[0-9]{1,2}T[0-9]{1,2}:[0-9]{1,2}:[0-9]{1,2}\.[0-9]{6})[0-9]+$', date_str)
3230 if m:
3231 date_str = m.group(1)
3232
46f59e89
S
3233 for expression in date_formats(day_first):
3234 try:
7dc2a74e 3235 dt = datetime.datetime.strptime(date_str, expression) - timezone + datetime.timedelta(hours=pm_delta)
46f59e89
S
3236 return calendar.timegm(dt.timetuple())
3237 except ValueError:
3238 pass
3239 timetuple = email.utils.parsedate_tz(date_str)
3240 if timetuple:
7dc2a74e 3241 return calendar.timegm(timetuple) + pm_delta * 3600
46f59e89
S
3242
3243
28e614de 3244def determine_ext(url, default_ext='unknown_video'):
85750f89 3245 if url is None or '.' not in url:
f4776371 3246 return default_ext
9cb9a5df 3247 guess = url.partition('?')[0].rpartition('.')[2]
73e79f2a
PH
3248 if re.match(r'^[A-Za-z0-9]+$', guess):
3249 return guess
a7aaa398
S
3250 # Try extract ext from URLs like http://example.com/foo/bar.mp4/?download
3251 elif guess.rstrip('/') in KNOWN_EXTENSIONS:
9cb9a5df 3252 return guess.rstrip('/')
73e79f2a 3253 else:
cbdbb766 3254 return default_ext
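# Illustrative usage:
#   determine_ext('https://example.com/video.mp4?dl=1') == 'mp4'
#   determine_ext('https://example.com/download') == 'unknown_video'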
73e79f2a 3255
5f6a1245 3256
824fa511
S
3257def subtitles_filename(filename, sub_lang, sub_format, expected_real_ext=None):
3258 return replace_extension(filename, sub_lang + '.' + sub_format, expected_real_ext)
d4051a8e 3259
5f6a1245 3260
9e62f283 3261def datetime_from_str(date_str, precision='auto', format='%Y%m%d'):
37254abc
JMF
3262 """
3263 Return a datetime object from a string in the format YYYYMMDD or
9e62f283 3264 (now|today|date)[+-][0-9](microsecond|second|minute|hour|day|week|month|year)(s)?
3265
3266 format: string date format used to return datetime object from
3267 precision: round the time portion of a datetime object.
3268 auto|microsecond|second|minute|hour|day.
3269 auto: round to the unit provided in date_str (if applicable).
3270 """
3271 auto_precision = False
3272 if precision == 'auto':
3273 auto_precision = True
3274 precision = 'microsecond'
3275 today = datetime_round(datetime.datetime.now(), precision)
f8795e10 3276 if date_str in ('now', 'today'):
37254abc 3277 return today
f8795e10
PH
3278 if date_str == 'yesterday':
3279 return today - datetime.timedelta(days=1)
9e62f283 3280 match = re.match(
3281 r'(?P<start>.+)(?P<sign>[+-])(?P<time>\d+)(?P<unit>microsecond|second|minute|hour|day|week|month|year)(s)?',
3282 date_str)
37254abc 3283 if match is not None:
9e62f283 3284 start_time = datetime_from_str(match.group('start'), precision, format)
3285 time = int(match.group('time')) * (-1 if match.group('sign') == '-' else 1)
37254abc 3286 unit = match.group('unit')
9e62f283 3287 if unit == 'month' or unit == 'year':
3288 new_date = datetime_add_months(start_time, time * 12 if unit == 'year' else time)
37254abc 3289 unit = 'day'
9e62f283 3290 else:
3291 if unit == 'week':
3292 unit = 'day'
3293 time *= 7
3294 delta = datetime.timedelta(**{unit + 's': time})
3295 new_date = start_time + delta
3296 if auto_precision:
3297 return datetime_round(new_date, unit)
3298 return new_date
3299
3300 return datetime_round(datetime.datetime.strptime(date_str, format), precision)
3301
3302
3303def date_from_str(date_str, format='%Y%m%d'):
3304 """
3305 Return a datetime object from a string in the format YYYYMMDD or
3306 (now|today|date)[+-][0-9](microsecond|second|minute|hour|day|week|month|year)(s)?
3307
3308 format: string date format used to return datetime object from
3309 """
3310 return datetime_from_str(date_str, precision='microsecond', format=format).date()
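# Illustrative usage: both absolute and relative dates are supported,
#   date_from_str('20211231') == datetime.date(2021, 12, 31)
#   date_from_str('now-1week')  # the date 7 days before today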
3311
3312
3313def datetime_add_months(dt, months):
3314 """Increment/Decrement a datetime object by months."""
3315 month = dt.month + months - 1
3316 year = dt.year + month // 12
3317 month = month % 12 + 1
3318 day = min(dt.day, calendar.monthrange(year, month)[1])
3319 return dt.replace(year, month, day)
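# Illustrative usage: the day of month is clamped to the target month's length,
# e.g. datetime_add_months(datetime.datetime(2020, 1, 31), 1) == datetime.datetime(2020, 2, 29)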
3320
3321
3322def datetime_round(dt, precision='day'):
3323 """
3324 Round a datetime object's time to a specific precision
3325 """
3326 if precision == 'microsecond':
3327 return dt
3328
3329 unit_seconds = {
3330 'day': 86400,
3331 'hour': 3600,
3332 'minute': 60,
3333 'second': 1,
3334 }
3335 roundto = lambda x, n: ((x + n / 2) // n) * n
3336 timestamp = calendar.timegm(dt.timetuple())
3337 return datetime.datetime.utcfromtimestamp(roundto(timestamp, unit_seconds[precision]))
5f6a1245
JW
3338
3339
e63fc1be 3340def hyphenate_date(date_str):
3341 """
3342 Convert a date in 'YYYYMMDD' format to 'YYYY-MM-DD' format"""
3343 match = re.match(r'^(\d\d\d\d)(\d\d)(\d\d)$', date_str)
3344 if match is not None:
3345 return '-'.join(match.groups())
3346 else:
3347 return date_str
3348
5f6a1245 3349
bd558525
JMF
3350class DateRange(object):
3351 """Represents a time interval between two dates"""
5f6a1245 3352
bd558525
JMF
3353 def __init__(self, start=None, end=None):
3354 """start and end must be strings in the format accepted by date"""
3355 if start is not None:
3356 self.start = date_from_str(start)
3357 else:
3358 self.start = datetime.datetime.min.date()
3359 if end is not None:
3360 self.end = date_from_str(end)
3361 else:
3362 self.end = datetime.datetime.max.date()
37254abc 3363 if self.start > self.end:
bd558525 3364 raise ValueError('Date range: "%s" , the start date must be before the end date' % self)
5f6a1245 3365
bd558525
JMF
3366 @classmethod
3367 def day(cls, day):
3368 """Returns a range that only contains the given day"""
5f6a1245
JW
3369 return cls(day, day)
3370
bd558525
JMF
3371 def __contains__(self, date):
3372 """Check if the date is in the range"""
37254abc
JMF
3373 if not isinstance(date, datetime.date):
3374 date = date_from_str(date)
3375 return self.start <= date <= self.end
5f6a1245 3376
bd558525 3377 def __str__(self):
5f6a1245 3378 return '%s - %s' % (self.start.isoformat(), self.end.isoformat())
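# Illustrative usage:
#   '20210615' in DateRange('20210101', '20211231')  # True
#   DateRange.day('20210101')                        # contains only 2021-01-01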
c496ca96
PH
3379
3380
3381def platform_name():
3382 """ Returns the platform name as a compat_str """
3383 res = platform.platform()
3384 if isinstance(res, bytes):
3385 res = res.decode(preferredencoding())
3386
3387 assert isinstance(res, compat_str)
3388 return res
c257baff
PH
3389
3390
49fa4d9a
N
3391def get_windows_version():
3392 ''' Get Windows version. None if it's not running on Windows '''
3393 if compat_os_name == 'nt':
3394 return version_tuple(platform.win32_ver()[1])
3395 else:
3396 return None
3397
3398
b58ddb32
PH
3399def _windows_write_string(s, out):
3400 """ Returns True if the string was written using special methods,
3401 False if it has yet to be written out."""
3402 # Adapted from http://stackoverflow.com/a/3259271/35070
3403
3404 import ctypes
3405 import ctypes.wintypes
3406
3407 WIN_OUTPUT_IDS = {
3408 1: -11,
3409 2: -12,
3410 }
3411
a383a98a
PH
3412 try:
3413 fileno = out.fileno()
3414 except AttributeError:
3415 # If the output stream doesn't have a fileno, it's virtual
3416 return False
aa42e873
PH
3417 except io.UnsupportedOperation:
3418 # Some strange Windows pseudo files?
3419 return False
b58ddb32
PH
3420 if fileno not in WIN_OUTPUT_IDS:
3421 return False
3422
d7cd9a9e 3423 GetStdHandle = compat_ctypes_WINFUNCTYPE(
b58ddb32 3424 ctypes.wintypes.HANDLE, ctypes.wintypes.DWORD)(
d7cd9a9e 3425 ('GetStdHandle', ctypes.windll.kernel32))
b58ddb32
PH
3426 h = GetStdHandle(WIN_OUTPUT_IDS[fileno])
3427
d7cd9a9e 3428 WriteConsoleW = compat_ctypes_WINFUNCTYPE(
b58ddb32
PH
3429 ctypes.wintypes.BOOL, ctypes.wintypes.HANDLE, ctypes.wintypes.LPWSTR,
3430 ctypes.wintypes.DWORD, ctypes.POINTER(ctypes.wintypes.DWORD),
d7cd9a9e 3431 ctypes.wintypes.LPVOID)(('WriteConsoleW', ctypes.windll.kernel32))
b58ddb32
PH
3432 written = ctypes.wintypes.DWORD(0)
3433
d7cd9a9e 3434 GetFileType = compat_ctypes_WINFUNCTYPE(ctypes.wintypes.DWORD, ctypes.wintypes.DWORD)(('GetFileType', ctypes.windll.kernel32))
b58ddb32
PH
3435 FILE_TYPE_CHAR = 0x0002
3436 FILE_TYPE_REMOTE = 0x8000
d7cd9a9e 3437 GetConsoleMode = compat_ctypes_WINFUNCTYPE(
b58ddb32
PH
3438 ctypes.wintypes.BOOL, ctypes.wintypes.HANDLE,
3439 ctypes.POINTER(ctypes.wintypes.DWORD))(
d7cd9a9e 3440 ('GetConsoleMode', ctypes.windll.kernel32))
b58ddb32
PH
3441 INVALID_HANDLE_VALUE = ctypes.wintypes.DWORD(-1).value
3442
3443 def not_a_console(handle):
3444 if handle == INVALID_HANDLE_VALUE or handle is None:
3445 return True
3089bc74
S
3446 return ((GetFileType(handle) & ~FILE_TYPE_REMOTE) != FILE_TYPE_CHAR
3447 or GetConsoleMode(handle, ctypes.byref(ctypes.wintypes.DWORD())) == 0)
b58ddb32
PH
3448
3449 if not_a_console(h):
3450 return False
3451
d1b9c912
PH
3452 def next_nonbmp_pos(s):
3453 try:
3454 return next(i for i, c in enumerate(s) if ord(c) > 0xffff)
3455 except StopIteration:
3456 return len(s)
3457
3458 while s:
3459 count = min(next_nonbmp_pos(s), 1024)
3460
b58ddb32 3461 ret = WriteConsoleW(
d1b9c912 3462 h, s, count if count else 2, ctypes.byref(written), None)
b58ddb32
PH
3463 if ret == 0:
3464 raise OSError('Failed to write string')
d1b9c912
PH
3465 if not count: # We just wrote a non-BMP character
3466 assert written.value == 2
3467 s = s[1:]
3468 else:
3469 assert written.value > 0
3470 s = s[written.value:]
b58ddb32
PH
3471 return True
3472
3473
734f90bb 3474def write_string(s, out=None, encoding=None):
7459e3a2
PH
3475 if out is None:
3476 out = sys.stderr
8bf48f23 3477 assert type(s) == compat_str
7459e3a2 3478
b58ddb32
PH
3479 if sys.platform == 'win32' and encoding is None and hasattr(out, 'fileno'):
3480 if _windows_write_string(s, out):
3481 return
3482
3089bc74
S
3483 if ('b' in getattr(out, 'mode', '')
3484 or sys.version_info[0] < 3): # Python 2 lies about mode of sys.stderr
104aa738
PH
3485 byt = s.encode(encoding or preferredencoding(), 'ignore')
3486 out.write(byt)
3487 elif hasattr(out, 'buffer'):
3488 enc = encoding or getattr(out, 'encoding', None) or preferredencoding()
3489 byt = s.encode(enc, 'ignore')
3490 out.buffer.write(byt)
3491 else:
8bf48f23 3492 out.write(s)
7459e3a2
PH
3493 out.flush()
3494
3495
48ea9cea
PH
3496def bytes_to_intlist(bs):
3497 if not bs:
3498 return []
3499 if isinstance(bs[0], int): # Python 3
3500 return list(bs)
3501 else:
3502 return [ord(c) for c in bs]
3503
c257baff 3504
cba892fa 3505def intlist_to_bytes(xs):
3506 if not xs:
3507 return b''
edaa23f8 3508 return compat_struct_pack('%dB' % len(xs), *xs)
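# Illustrative usage: these two helpers round-trip between bytes and lists of ints,
# e.g. bytes_to_intlist(b'abc') == [97, 98, 99] and intlist_to_bytes([97, 98, 99]) == b'abc'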
c38b1e77
PH
3509
3510
c1c9a79c
PH
3511# Cross-platform file locking
3512if sys.platform == 'win32':
3513 import ctypes.wintypes
3514 import msvcrt
3515
3516 class OVERLAPPED(ctypes.Structure):
3517 _fields_ = [
3518 ('Internal', ctypes.wintypes.LPVOID),
3519 ('InternalHigh', ctypes.wintypes.LPVOID),
3520 ('Offset', ctypes.wintypes.DWORD),
3521 ('OffsetHigh', ctypes.wintypes.DWORD),
3522 ('hEvent', ctypes.wintypes.HANDLE),
3523 ]
3524
3525 kernel32 = ctypes.windll.kernel32
3526 LockFileEx = kernel32.LockFileEx
3527 LockFileEx.argtypes = [
3528 ctypes.wintypes.HANDLE, # hFile
3529 ctypes.wintypes.DWORD, # dwFlags
3530 ctypes.wintypes.DWORD, # dwReserved
3531 ctypes.wintypes.DWORD, # nNumberOfBytesToLockLow
3532 ctypes.wintypes.DWORD, # nNumberOfBytesToLockHigh
3533 ctypes.POINTER(OVERLAPPED) # Overlapped
3534 ]
3535 LockFileEx.restype = ctypes.wintypes.BOOL
3536 UnlockFileEx = kernel32.UnlockFileEx
3537 UnlockFileEx.argtypes = [
3538 ctypes.wintypes.HANDLE, # hFile
3539 ctypes.wintypes.DWORD, # dwReserved
3540 ctypes.wintypes.DWORD, # nNumberOfBytesToLockLow
3541 ctypes.wintypes.DWORD, # nNumberOfBytesToLockHigh
3542 ctypes.POINTER(OVERLAPPED) # Overlapped
3543 ]
3544 UnlockFileEx.restype = ctypes.wintypes.BOOL
3545 whole_low = 0xffffffff
3546 whole_high = 0x7fffffff
3547
3548 def _lock_file(f, exclusive):
3549 overlapped = OVERLAPPED()
3550 overlapped.Offset = 0
3551 overlapped.OffsetHigh = 0
3552 overlapped.hEvent = 0
3553 f._lock_file_overlapped_p = ctypes.pointer(overlapped)
3554 handle = msvcrt.get_osfhandle(f.fileno())
3555 if not LockFileEx(handle, 0x2 if exclusive else 0x0, 0,
3556 whole_low, whole_high, f._lock_file_overlapped_p):
3557 raise OSError('Locking file failed: %r' % ctypes.FormatError())
3558
3559 def _unlock_file(f):
3560 assert f._lock_file_overlapped_p
3561 handle = msvcrt.get_osfhandle(f.fileno())
3562 if not UnlockFileEx(handle, 0,
3563 whole_low, whole_high, f._lock_file_overlapped_p):
3564 raise OSError('Unlocking file failed: %r' % ctypes.FormatError())
3565
3566else:
399a76e6
YCH
3567 # Some platforms, such as Jython, are missing fcntl
3568 try:
3569 import fcntl
c1c9a79c 3570
399a76e6
YCH
3571 def _lock_file(f, exclusive):
3572 fcntl.flock(f, fcntl.LOCK_EX if exclusive else fcntl.LOCK_SH)
c1c9a79c 3573
399a76e6
YCH
3574 def _unlock_file(f):
3575 fcntl.flock(f, fcntl.LOCK_UN)
3576 except ImportError:
3577 UNSUPPORTED_MSG = 'file locking is not supported on this platform'
3578
3579 def _lock_file(f, exclusive):
3580 raise IOError(UNSUPPORTED_MSG)
3581
3582 def _unlock_file(f):
3583 raise IOError(UNSUPPORTED_MSG)
c1c9a79c
PH
3584
3585
3586class locked_file(object):
3587 def __init__(self, filename, mode, encoding=None):
3588 assert mode in ['r', 'a', 'w']
3589 self.f = io.open(filename, mode, encoding=encoding)
3590 self.mode = mode
3591
3592 def __enter__(self):
3593 exclusive = self.mode != 'r'
3594 try:
3595 _lock_file(self.f, exclusive)
3596 except IOError:
3597 self.f.close()
3598 raise
3599 return self
3600
3601 def __exit__(self, etype, value, traceback):
3602 try:
3603 _unlock_file(self.f)
3604 finally:
3605 self.f.close()
3606
3607 def __iter__(self):
3608 return iter(self.f)
3609
3610 def write(self, *args):
3611 return self.f.write(*args)
3612
3613 def read(self, *args):
3614 return self.f.read(*args)
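# Illustrative usage (hypothetical filename): the lock is acquired on __enter__
# and released (with the file closed) on __exit__,
#   with locked_file('archive.txt', 'a', encoding='utf-8') as f:
#       f.write('youtube abc123\n')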
4eb7f1d1
JMF
3615
3616
4644ac55
S
3617def get_filesystem_encoding():
3618 encoding = sys.getfilesystemencoding()
3619 return encoding if encoding is not None else 'utf-8'
3620
3621
4eb7f1d1 3622def shell_quote(args):
a6a173c2 3623 quoted_args = []
4644ac55 3624 encoding = get_filesystem_encoding()
a6a173c2
JMF
3625 for a in args:
3626 if isinstance(a, bytes):
3627 # We may get a filename encoded with 'encodeFilename'
3628 a = a.decode(encoding)
aefce8e6 3629 quoted_args.append(compat_shlex_quote(a))
28e614de 3630 return ' '.join(quoted_args)
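# Illustrative usage (POSIX-style quoting via compat_shlex_quote):
#   shell_quote(['ffmpeg', '-i', 'my file.mp4']) == "ffmpeg -i 'my file.mp4'"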
9d4660ca
PH
3631
3632
3633def smuggle_url(url, data):
3634 """ Pass additional data in a URL for internal use. """
3635
81953d1a
RA
3636 url, idata = unsmuggle_url(url, {})
3637 data.update(idata)
15707c7e 3638 sdata = compat_urllib_parse_urlencode(
28e614de
PH
3639 {'__youtubedl_smuggle': json.dumps(data)})
3640 return url + '#' + sdata
9d4660ca
PH
3641
3642
79f82953 3643def unsmuggle_url(smug_url, default=None):
83e865a3 3644 if '#__youtubedl_smuggle' not in smug_url:
79f82953 3645 return smug_url, default
28e614de
PH
3646 url, _, sdata = smug_url.rpartition('#')
3647 jsond = compat_parse_qs(sdata)['__youtubedl_smuggle'][0]
9d4660ca
PH
3648 data = json.loads(jsond)
3649 return url, data
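# Illustrative usage: smuggle_url and unsmuggle_url round-trip,
#   url = smuggle_url('https://example.com/video', {'referer': 'https://example.com/'})
#   unsmuggle_url(url) == ('https://example.com/video', {'referer': 'https://example.com/'})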
02dbf93f
PH
3650
3651
02dbf93f
PH
3652def format_bytes(bytes):
3653 if bytes is None:
28e614de 3654 return 'N/A'
02dbf93f
PH
3655 if type(bytes) is str:
3656 bytes = float(bytes)
3657 if bytes == 0.0:
3658 exponent = 0
3659 else:
3660 exponent = int(math.log(bytes, 1024.0))
28e614de 3661 suffix = ['B', 'KiB', 'MiB', 'GiB', 'TiB', 'PiB', 'EiB', 'ZiB', 'YiB'][exponent]
02dbf93f 3662 converted = float(bytes) / float(1024 ** exponent)
28e614de 3663 return '%.2f%s' % (converted, suffix)
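# Illustrative usage:
#   format_bytes(1536) == '1.50KiB'
#   format_bytes(None) == 'N/A'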
f53c966a 3664
1c088fa8 3665
fb47597b
S
3666def lookup_unit_table(unit_table, s):
3667 units_re = '|'.join(re.escape(u) for u in unit_table)
3668 m = re.match(
782b1b5b 3669 r'(?P<num>[0-9]+(?:[,.][0-9]*)?)\s*(?P<unit>%s)\b' % units_re, s)
fb47597b
S
3670 if not m:
3671 return None
3672 num_str = m.group('num').replace(',', '.')
3673 mult = unit_table[m.group('unit')]
3674 return int(float(num_str) * mult)
3675
3676
be64b5b0
PH
3677def parse_filesize(s):
3678 if s is None:
3679 return None
3680
dfb1b146 3681 # The lower-case forms are of course incorrect and unofficial,
be64b5b0
PH
3682 # but we support those too
3683 _UNIT_TABLE = {
3684 'B': 1,
3685 'b': 1,
70852b47 3686 'bytes': 1,
be64b5b0
PH
3687 'KiB': 1024,
3688 'KB': 1000,
3689 'kB': 1024,
3690 'Kb': 1000,
13585d76 3691 'kb': 1000,
70852b47
YCH
3692 'kilobytes': 1000,
3693 'kibibytes': 1024,
be64b5b0
PH
3694 'MiB': 1024 ** 2,
3695 'MB': 1000 ** 2,
3696 'mB': 1024 ** 2,
3697 'Mb': 1000 ** 2,
13585d76 3698 'mb': 1000 ** 2,
70852b47
YCH
3699 'megabytes': 1000 ** 2,
3700 'mebibytes': 1024 ** 2,
be64b5b0
PH
3701 'GiB': 1024 ** 3,
3702 'GB': 1000 ** 3,
3703 'gB': 1024 ** 3,
3704 'Gb': 1000 ** 3,
13585d76 3705 'gb': 1000 ** 3,
70852b47
YCH
3706 'gigabytes': 1000 ** 3,
3707 'gibibytes': 1024 ** 3,
be64b5b0
PH
3708 'TiB': 1024 ** 4,
3709 'TB': 1000 ** 4,
3710 'tB': 1024 ** 4,
3711 'Tb': 1000 ** 4,
13585d76 3712 'tb': 1000 ** 4,
70852b47
YCH
3713 'terabytes': 1000 ** 4,
3714 'tebibytes': 1024 ** 4,
be64b5b0
PH
3715 'PiB': 1024 ** 5,
3716 'PB': 1000 ** 5,
3717 'pB': 1024 ** 5,
3718 'Pb': 1000 ** 5,
13585d76 3719 'pb': 1000 ** 5,
70852b47
YCH
3720 'petabytes': 1000 ** 5,
3721 'pebibytes': 1024 ** 5,
be64b5b0
PH
3722 'EiB': 1024 ** 6,
3723 'EB': 1000 ** 6,
3724 'eB': 1024 ** 6,
3725 'Eb': 1000 ** 6,
13585d76 3726 'eb': 1000 ** 6,
70852b47
YCH
3727 'exabytes': 1000 ** 6,
3728 'exbibytes': 1024 ** 6,
be64b5b0
PH
3729 'ZiB': 1024 ** 7,
3730 'ZB': 1000 ** 7,
3731 'zB': 1024 ** 7,
3732 'Zb': 1000 ** 7,
13585d76 3733 'zb': 1000 ** 7,
70852b47
YCH
3734 'zettabytes': 1000 ** 7,
3735 'zebibytes': 1024 ** 7,
be64b5b0
PH
3736 'YiB': 1024 ** 8,
3737 'YB': 1000 ** 8,
3738 'yB': 1024 ** 8,
3739 'Yb': 1000 ** 8,
13585d76 3740 'yb': 1000 ** 8,
70852b47
YCH
3741 'yottabytes': 1000 ** 8,
3742 'yobibytes': 1024 ** 8,
be64b5b0
PH
3743 }
3744
fb47597b
S
3745 return lookup_unit_table(_UNIT_TABLE, s)
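# Illustrative usage: both SI and binary suffixes are recognized,
#   parse_filesize('1.5GiB') == 1610612736
#   parse_filesize('10MB') == 10000000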
3746
3747
3748def parse_count(s):
3749 if s is None:
be64b5b0
PH
3750 return None
3751
fb47597b
S
3752 s = s.strip()
3753
3754 if re.match(r'^[\d,.]+$', s):
3755 return str_to_int(s)
3756
3757 _UNIT_TABLE = {
3758 'k': 1000,
3759 'K': 1000,
3760 'm': 1000 ** 2,
3761 'M': 1000 ** 2,
3762 'kk': 1000 ** 2,
3763 'KK': 1000 ** 2,
3764 }
be64b5b0 3765
fb47597b 3766 return lookup_unit_table(_UNIT_TABLE, s)
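# Illustrative usage:
#   parse_count('1,234') == 1234
#   parse_count('1.2M') == 1200000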
be64b5b0 3767
2f7ae819 3768
b871d7e9
S
3769def parse_resolution(s):
3770 if s is None:
3771 return {}
3772
17ec8bcf 3773 mobj = re.search(r'(?<![a-zA-Z0-9])(?P<w>\d+)\s*[xX×,]\s*(?P<h>\d+)(?![a-zA-Z0-9])', s)
b871d7e9
S
3774 if mobj:
3775 return {
3776 'width': int(mobj.group('w')),
3777 'height': int(mobj.group('h')),
3778 }
3779
17ec8bcf 3780 mobj = re.search(r'(?<![a-zA-Z0-9])(\d+)[pPiI](?![a-zA-Z0-9])', s)
b871d7e9
S
3781 if mobj:
3782 return {'height': int(mobj.group(1))}
3783
3784 mobj = re.search(r'\b([48])[kK]\b', s)
3785 if mobj:
3786 return {'height': int(mobj.group(1)) * 540}
3787
3788 return {}
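# Illustrative usage:
#   parse_resolution('1920x1080') == {'width': 1920, 'height': 1080}
#   parse_resolution('720p') == {'height': 720}
#   parse_resolution('4K') == {'height': 2160}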
3789
3790
0dc41787
S
3791def parse_bitrate(s):
3792 if not isinstance(s, compat_str):
3793 return
3794 mobj = re.search(r'\b(\d+)\s*kbps', s)
3795 if mobj:
3796 return int(mobj.group(1))
3797
3798
a942d6cb 3799def month_by_name(name, lang='en'):
caefb1de
PH
3800 """ Return the number of a month by (locale-independently) English name """
3801
f6717dec 3802 month_names = MONTH_NAMES.get(lang, MONTH_NAMES['en'])
a942d6cb 3803
caefb1de 3804 try:
f6717dec 3805 return month_names.index(name) + 1
7105440c
YCH
3806 except ValueError:
3807 return None
3808
3809
3810def month_by_abbreviation(abbrev):
3811 """ Return the number of a month by (locale-independently) English
3812 abbreviations """
3813
3814 try:
3815 return [s[:3] for s in ENGLISH_MONTH_NAMES].index(abbrev) + 1
caefb1de
PH
3816 except ValueError:
3817 return None
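# Illustrative usage (using the English month-name tables defined elsewhere in this module):
#   month_by_name('December') == 12
#   month_by_abbreviation('Mar') == 3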
18258362
JMF
3818
3819
5aafe895 3820def fix_xml_ampersands(xml_str):
18258362 3821 """Replace all the '&' by '&amp;' in XML"""
5aafe895
PH
3822 return re.sub(
3823 r'&(?!amp;|lt;|gt;|apos;|quot;|#x[0-9a-fA-F]{,4};|#[0-9]{,4};)',
28e614de 3824 '&amp;',
5aafe895 3825 xml_str)
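# Illustrative usage: bare ampersands are escaped while existing entities are left alone,
# e.g. fix_xml_ampersands('<a t="x&y">&amp;</a>') == '<a t="x&amp;y">&amp;</a>'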
e3946f98
PH
3826
3827
3828def setproctitle(title):
8bf48f23 3829 assert isinstance(title, compat_str)
c1c05c67
YCH
3830
3831 # ctypes in Jython is not complete
3832 # http://bugs.jython.org/issue2148
3833 if sys.platform.startswith('java'):
3834 return
3835
e3946f98 3836 try:
611c1dd9 3837 libc = ctypes.cdll.LoadLibrary('libc.so.6')
e3946f98
PH
3838 except OSError:
3839 return
2f49bcd6
RC
3840 except TypeError:
3841 # LoadLibrary in Windows Python 2.7.13 only expects
3842 # a bytestring, but since unicode_literals turns
3843 # every string into a unicode string, it fails.
3844 return
6eefe533
PH
3845 title_bytes = title.encode('utf-8')
3846 buf = ctypes.create_string_buffer(len(title_bytes))
3847 buf.value = title_bytes
e3946f98 3848 try:
6eefe533 3849 libc.prctl(15, buf, 0, 0, 0)
e3946f98
PH
3850 except AttributeError:
3851 return # Strange libc, just skip this
d7dda168
PH
3852
3853
3854def remove_start(s, start):
46bc9b7d 3855 return s[len(start):] if s is not None and s.startswith(start) else s
29eb5174
PH
3856
3857
2b9faf55 3858def remove_end(s, end):
46bc9b7d 3859 return s[:-len(end)] if s is not None and s.endswith(end) else s
2b9faf55
PH
3860
3861
31b2051e
S
3862def remove_quotes(s):
3863 if s is None or len(s) < 2:
3864 return s
3865 for quote in ('"', "'", ):
3866 if s[0] == quote and s[-1] == quote:
3867 return s[1:-1]
3868 return s
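# Illustrative usage:
#   remove_start('www.example.com', 'www.') == 'example.com'
#   remove_end('file.mp4.part', '.part') == 'file.mp4'
#   remove_quotes('"title"') == 'title'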
3869
3870
b6e0c7d2
U
3871def get_domain(url):
3872 domain = re.match(r'(?:https?:\/\/)?(?:www\.)?(?P<domain>[^\n\/]+\.[^\n\/]+)(?:\/(.*))?', url)
3873 return domain.group('domain') if domain else None
3874
3875
29eb5174 3876def url_basename(url):
9b8aaeed 3877 path = compat_urlparse.urlparse(url).path
28e614de 3878 return path.strip('/').split('/')[-1]
aa94a6d3
PH
3879
3880
02dc0a36
S
3881def base_url(url):
3882 return re.match(r'https?://[^?#&]+/', url).group()
3883
3884
e34c3361 3885def urljoin(base, path):
4b5de77b
S
3886 if isinstance(path, bytes):
3887 path = path.decode('utf-8')
e34c3361
S
3888 if not isinstance(path, compat_str) or not path:
3889 return None
fad4ceb5 3890 if re.match(r'^(?:[a-zA-Z][a-zA-Z0-9+-.]*:)?//', path):
e34c3361 3891 return path
4b5de77b
S
3892 if isinstance(base, bytes):
3893 base = base.decode('utf-8')
3894 if not isinstance(base, compat_str) or not re.match(
3895 r'^(?:https?:)?//', base):
e34c3361
S
3896 return None
3897 return compat_urlparse.urljoin(base, path)
3898
3899
aa94a6d3
PH
3900class HEADRequest(compat_urllib_request.Request):
3901 def get_method(self):
611c1dd9 3902 return 'HEAD'
7217e148
PH
3903
3904
95cf60e8
S
3905class PUTRequest(compat_urllib_request.Request):
3906 def get_method(self):
3907 return 'PUT'
3908
3909
9732d77e 3910def int_or_none(v, scale=1, default=None, get_attr=None, invscale=1):
28746fbd
PH
3911 if get_attr:
3912 if v is not None:
3913 v = getattr(v, get_attr, None)
9572013d
PH
3914 if v == '':
3915 v = None
1812afb7
S
3916 if v is None:
3917 return default
3918 try:
3919 return int(v) * invscale // scale
31c49255 3920 except (ValueError, TypeError, OverflowError):
af98f8ff 3921 return default
9732d77e 3922
9572013d 3923
40a90862
JMF
3924def str_or_none(v, default=None):
3925 return default if v is None else compat_str(v)
3926
9732d77e
PH
3927
3928def str_to_int(int_str):
48d4681e 3929 """ A more relaxed version of int_or_none """
42db58ec 3930 if isinstance(int_str, compat_integer_types):
348c6bf1 3931 return int_str
42db58ec
S
3932 elif isinstance(int_str, compat_str):
3933 int_str = re.sub(r'[,\.\+]', '', int_str)
3934 return int_or_none(int_str)
608d11f5
PH
3935
3936
9732d77e 3937def float_or_none(v, scale=1, invscale=1, default=None):
caf80631
S
3938 if v is None:
3939 return default
3940 try:
3941 return float(v) * invscale / scale
5e1271c5 3942 except (ValueError, TypeError):
caf80631 3943 return default
43f775e4
PH
3944
3945
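# Usage sketch (illustrative only, with invented values): the *_or_none
# converters above tolerate missing or messy input instead of raising.
from yt_dlp.utils import int_or_none, str_to_int, float_or_none

int_or_none('1024')                  # -> 1024
int_or_none(None, default=0)         # -> 0    (None falls back to the default)
int_or_none('2500', scale=1000)      # -> 2    (scaled with integer division)
str_to_int('1,234,567')              # -> 1234567  (thousands separators stripped)
float_or_none('1.5', invscale=1000)  # -> 1500.0
float_or_none('n/a')                 # -> None (unparseable input -> default)
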
c7e327c4
S
3946def bool_or_none(v, default=None):
3947 return v if isinstance(v, bool) else default
3948
3949
53cd37ba
S
3950def strip_or_none(v, default=None):
3951 return v.strip() if isinstance(v, compat_str) else default
b72b4431
S
3952
3953
af03000a
S
3954def url_or_none(url):
3955 if not url or not isinstance(url, compat_str):
3956 return None
3957 url = url.strip()
29f7c58a 3958 return url if re.match(r'^(?:(?:https?|rt(?:m(?:pt?[es]?|fp)|sp[su]?)|mms|ftps?):)?//', url) else None
af03000a
S
3959
3960
e29663c6 3961def strftime_or_none(timestamp, date_format, default=None):
3962 datetime_object = None
3963 try:
3964 if isinstance(timestamp, compat_numeric_types): # unix timestamp
3965 datetime_object = datetime.datetime.utcfromtimestamp(timestamp)
3966 elif isinstance(timestamp, compat_str): # assume YYYYMMDD
3967 datetime_object = datetime.datetime.strptime(timestamp, '%Y%m%d')
3968 return datetime_object.strftime(date_format)
3969 except (ValueError, TypeError, AttributeError):
3970 return default
3971
3972
608d11f5 3973def parse_duration(s):
8f9312c3 3974 if not isinstance(s, compat_basestring):
608d11f5 3975 return None
ca7b3246 3976 s = s.strip()
38d79fd1 3977 if not s:
3978 return None
ca7b3246 3979
acaff495 3980 days, hours, mins, secs, ms = [None] * 5
15846398 3981 m = re.match(r'(?:(?:(?:(?P<days>[0-9]+):)?(?P<hours>[0-9]+):)?(?P<mins>[0-9]+):)?(?P<secs>[0-9]+)(?P<ms>\.[0-9]+)?Z?$', s)
acaff495 3982 if m:
3983 days, hours, mins, secs, ms = m.groups()
3984 else:
3985 m = re.match(
056653bb
S
3986 r'''(?ix)(?:P?
3987 (?:
3988 [0-9]+\s*y(?:ears?)?\s*
3989 )?
3990 (?:
3991 [0-9]+\s*m(?:onths?)?\s*
3992 )?
3993 (?:
3994 [0-9]+\s*w(?:eeks?)?\s*
3995 )?
8f4b58d7 3996 (?:
acaff495 3997 (?P<days>[0-9]+)\s*d(?:ays?)?\s*
8f4b58d7 3998 )?
056653bb 3999 T)?
acaff495 4000 (?:
4001 (?P<hours>[0-9]+)\s*h(?:ours?)?\s*
4002 )?
4003 (?:
4004 (?P<mins>[0-9]+)\s*m(?:in(?:ute)?s?)?\s*
4005 )?
4006 (?:
4007 (?P<secs>[0-9]+)(?P<ms>\.[0-9]+)?\s*s(?:ec(?:ond)?s?)?\s*
15846398 4008 )?Z?$''', s)
acaff495 4009 if m:
4010 days, hours, mins, secs, ms = m.groups()
4011 else:
15846398 4012 m = re.match(r'(?i)(?:(?P<hours>[0-9.]+)\s*(?:hours?)|(?P<mins>[0-9.]+)\s*(?:mins?\.?|minutes?)\s*)Z?$', s)
acaff495 4013 if m:
4014 hours, mins = m.groups()
4015 else:
4016 return None
4017
4018 duration = 0
4019 if secs:
4020 duration += float(secs)
4021 if mins:
4022 duration += float(mins) * 60
4023 if hours:
4024 duration += float(hours) * 60 * 60
4025 if days:
4026 duration += float(days) * 24 * 60 * 60
4027 if ms:
4028 duration += float(ms)
4029 return duration
91d7d0b3
JMF
4030
4031
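# Usage sketch (illustrative only, with made-up strings): parse_duration()
# normalizes clock-style, ISO 8601 and free-text durations to seconds.
from yt_dlp.utils import parse_duration

parse_duration('1:30')          # -> 90.0      (mm:ss)
parse_duration('01:02:03.05')   # -> 3723.05   (hh:mm:ss.ms)
parse_duration('PT1H30M')       # -> 5400.0    (ISO 8601)
parse_duration('3 min')         # -> 180.0     (free text)
parse_duration('soon')          # -> None      (unparseable)
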
e65e4c88 4032def prepend_extension(filename, ext, expected_real_ext=None):
5f6a1245 4033 name, real_ext = os.path.splitext(filename)
e65e4c88
S
4034 return (
4035 '{0}.{1}{2}'.format(name, ext, real_ext)
4036 if not expected_real_ext or real_ext[1:] == expected_real_ext
4037 else '{0}.{1}'.format(filename, ext))
d70ad093
PH
4038
4039
b3ed15b7
S
4040def replace_extension(filename, ext, expected_real_ext=None):
4041 name, real_ext = os.path.splitext(filename)
4042 return '{0}.{1}'.format(
4043 name if not expected_real_ext or real_ext[1:] == expected_real_ext else filename,
4044 ext)
4045
4046
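# Usage sketch (illustrative only, hypothetical filenames): the difference
# between the two filename helpers above.
from yt_dlp.utils import prepend_extension, replace_extension

prepend_extension('video.mp4', 'temp')    # -> 'video.temp.mp4'
replace_extension('video.mp4', 'mkv')     # -> 'video.mkv'
# With expected_real_ext, a non-matching real extension is left untouched:
prepend_extension('video.webm', 'temp', expected_real_ext='mp4')  # -> 'video.webm.temp'
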
d70ad093
PH
4047def check_executable(exe, args=[]):
4048 """ Checks if the given binary is installed somewhere in PATH, and returns its name.
4049 args can be a list of arguments for a short output (like -version) """
4050 try:
d3c93ec2 4051 Popen([exe] + args, stdout=subprocess.PIPE, stderr=subprocess.PIPE).communicate_or_kill()
d70ad093
PH
4052 except OSError:
4053 return False
4054 return exe
b7ab0590
PH
4055
4056
9af98e17 4057def _get_exe_version_output(exe, args):
95807118 4058 try:
b64d04c1 4059 # STDIN should be redirected too. On UNIX-like systems, ffmpeg triggers
7a5c1cfe 4060 # SIGTTOU if yt-dlp is run in the background.
067aa17e 4061 # See https://github.com/ytdl-org/youtube-dl/issues/955#issuecomment-209789656
d3c93ec2 4062 out, _ = Popen(
4063 [encodeArgument(exe)] + args, stdin=subprocess.PIPE,
4064 stdout=subprocess.PIPE, stderr=subprocess.STDOUT).communicate_or_kill()
95807118
PH
4065 except OSError:
4066 return False
cae97f65
PH
4067 if isinstance(out, bytes): # Python 2.x
4068 out = out.decode('ascii', 'ignore')
9af98e17 4069 return out
cae97f65
PH
4070
4071
4072def detect_exe_version(output, version_re=None, unrecognized='present'):
4073 assert isinstance(output, compat_str)
4074 if version_re is None:
4075 version_re = r'version\s+([-0-9._a-zA-Z]+)'
4076 m = re.search(version_re, output)
95807118
PH
4077 if m:
4078 return m.group(1)
4079 else:
4080 return unrecognized
4081
4082
9af98e17 4083def get_exe_version(exe, args=['--version'],
4084 version_re=None, unrecognized='present'):
4085 """ Returns the version of the specified executable,
4086 or False if the executable is not present """
4087 out = _get_exe_version_output(exe, args)
4088 return detect_exe_version(out, version_re, unrecognized) if out else False
4089
4090
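# Usage sketch (illustrative only): typical calls to the two probes above; the
# results depend on which binaries are actually installed on the machine.
from yt_dlp.utils import check_executable, get_exe_version

check_executable('ffmpeg', ['-version'])     # -> 'ffmpeg' if it can be spawned, else False
get_exe_version('ffmpeg', args=['-version'])  # -> a version string, 'present' if it
                                              #    cannot be parsed, or False if missing
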
cb89cfc1 4091class LazyList(collections.abc.Sequence):
483336e7 4092 ''' Lazy immutable list from an iterable
4093 Note that slices of a LazyList are lists and not LazyList'''
4094
8e5fecc8 4095 class IndexError(IndexError):
4096 pass
4097
282f5709 4098 def __init__(self, iterable, *, reverse=False, _cache=None):
483336e7 4099 self.__iterable = iter(iterable)
282f5709 4100 self.__cache = [] if _cache is None else _cache
4101 self.__reversed = reverse
483336e7 4102
4103 def __iter__(self):
28419ca2 4104 if self.__reversed:
4105 # We need to consume the entire iterable to iterate in reverse
981052c9 4106 yield from self.exhaust()
28419ca2 4107 return
4108 yield from self.__cache
483336e7 4109 for item in self.__iterable:
4110 self.__cache.append(item)
4111 yield item
4112
981052c9 4113 def __exhaust(self):
483336e7 4114 self.__cache.extend(self.__iterable)
9f1a1c36 4115 # Discard the emptied iterable to make it pickle-able
4116 self.__iterable = []
28419ca2 4117 return self.__cache
4118
981052c9 4119 def exhaust(self):
4120 ''' Evaluate the entire iterable '''
4121 return self.__exhaust()[::-1 if self.__reversed else 1]
4122
28419ca2 4123 @staticmethod
981052c9 4124 def __reverse_index(x):
e0f2b4b4 4125 return None if x is None else -(x + 1)
483336e7 4126
4127 def __getitem__(self, idx):
4128 if isinstance(idx, slice):
28419ca2 4129 if self.__reversed:
e0f2b4b4 4130 idx = slice(self.__reverse_index(idx.start), self.__reverse_index(idx.stop), -(idx.step or 1))
4131 start, stop, step = idx.start, idx.stop, idx.step or 1
483336e7 4132 elif isinstance(idx, int):
28419ca2 4133 if self.__reversed:
981052c9 4134 idx = self.__reverse_index(idx)
e0f2b4b4 4135 start, stop, step = idx, idx, 0
483336e7 4136 else:
4137 raise TypeError('indices must be integers or slices')
e0f2b4b4 4138 if ((start or 0) < 0 or (stop or 0) < 0
4139 or (start is None and step < 0)
4140 or (stop is None and step > 0)):
483336e7 4141 # We need to consume the entire iterable to be able to slice from the end
4142 # Obviously, never use this with infinite iterables
8e5fecc8 4143 self.__exhaust()
4144 try:
4145 return self.__cache[idx]
4146 except IndexError as e:
4147 raise self.IndexError(e) from e
e0f2b4b4 4148 n = max(start or 0, stop or 0) - len(self.__cache) + 1
28419ca2 4149 if n > 0:
4150 self.__cache.extend(itertools.islice(self.__iterable, n))
8e5fecc8 4151 try:
4152 return self.__cache[idx]
4153 except IndexError as e:
4154 raise self.IndexError(e) from e
483336e7 4155
4156 def __bool__(self):
4157 try:
28419ca2 4158 self[-1] if self.__reversed else self[0]
8e5fecc8 4159 except self.IndexError:
483336e7 4160 return False
4161 return True
4162
4163 def __len__(self):
8e5fecc8 4164 self.__exhaust()
483336e7 4165 return len(self.__cache)
4166
282f5709 4167 def __reversed__(self):
4168 return type(self)(self.__iterable, reverse=not self.__reversed, _cache=self.__cache)
4169
4170 def __copy__(self):
4171 return type(self)(self.__iterable, reverse=self.__reversed, _cache=self.__cache)
4172
4173 def __deepcopy__(self, memo):
4174 # FIXME: This is actually just a shallow copy
4175 id_ = id(self)
4176 memo[id_] = self.__copy__()
4177 return memo[id_]
28419ca2 4178
4179 def __repr__(self):
4180 # repr and str should mimic a list. So we exhaust the iterable
4181 return repr(self.exhaust())
4182
4183 def __str__(self):
4184 return repr(self.exhaust())
4185
483336e7 4186
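# Usage sketch (illustrative only): LazyList pulls items from the underlying
# iterator only as far as an index requires; slices come back as plain lists.
import itertools
from yt_dlp.utils import LazyList

lazy = LazyList(itertools.count())      # backed by an infinite iterator
lazy[5]                                 # -> 5; only six items consumed so far
lazy[:3]                                # -> [0, 1, 2]; an ordinary list
bool(LazyList([]))                      # -> False, without exhausting anything
# Reversal must consume the whole iterable, so only use it on finite input:
list(reversed(LazyList(range(4))))      # -> [3, 2, 1, 0]
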
7be9ccff 4187class PagedList:
c07a39ae 4188
4189 class IndexError(IndexError):
4190 pass
4191
dd26ced1
PH
4192 def __len__(self):
4193 # This is only useful for tests
4194 return len(self.getslice())
4195
7be9ccff 4196 def __init__(self, pagefunc, pagesize, use_cache=True):
4197 self._pagefunc = pagefunc
4198 self._pagesize = pagesize
4199 self._use_cache = use_cache
4200 self._cache = {}
4201
4202 def getpage(self, pagenum):
d8cf8d97 4203 page_results = self._cache.get(pagenum)
4204 if page_results is None:
4205 page_results = list(self._pagefunc(pagenum))
7be9ccff 4206 if self._use_cache:
4207 self._cache[pagenum] = page_results
4208 return page_results
4209
4210 def getslice(self, start=0, end=None):
4211 return list(self._getslice(start, end))
4212
4213 def _getslice(self, start, end):
55575225 4214 raise NotImplementedError('This method must be implemented by subclasses')
4215
4216 def __getitem__(self, idx):
7be9ccff 4217 # NOTE: cache must be enabled if this is used
55575225 4218 if not isinstance(idx, int) or idx < 0:
4219 raise TypeError('indices must be non-negative integers')
4220 entries = self.getslice(idx, idx + 1)
d8cf8d97 4221 if not entries:
c07a39ae 4222 raise self.IndexError()
d8cf8d97 4223 return entries[0]
55575225 4224
9c44d242
PH
4225
4226class OnDemandPagedList(PagedList):
7be9ccff 4227 def _getslice(self, start, end):
b7ab0590
PH
4228 for pagenum in itertools.count(start // self._pagesize):
4229 firstid = pagenum * self._pagesize
4230 nextfirstid = pagenum * self._pagesize + self._pagesize
4231 if start >= nextfirstid:
4232 continue
4233
b7ab0590
PH
4234 startv = (
4235 start % self._pagesize
4236 if firstid <= start < nextfirstid
4237 else 0)
b7ab0590
PH
4238 endv = (
4239 ((end - 1) % self._pagesize) + 1
4240 if (end is not None and firstid <= end <= nextfirstid)
4241 else None)
4242
7be9ccff 4243 page_results = self.getpage(pagenum)
b7ab0590
PH
4244 if startv != 0 or endv is not None:
4245 page_results = page_results[startv:endv]
7be9ccff 4246 yield from page_results
b7ab0590
PH
4247
4248 # A little optimization - if the current page is not "full", i.e. does
4249 # not contain page_size videos, then we can assume that this page
4250 # is the last one - there are no more ids on further pages -
4251 # i.e. no need to query again.
4252 if len(page_results) + startv < self._pagesize:
4253 break
4254
4255 # If we got the whole page, but the next page is not interesting,
4256 # break out early as well
4257 if end == nextfirstid:
4258 break
81c2f20b
PH
4259
4260
9c44d242
PH
4261class InAdvancePagedList(PagedList):
4262 def __init__(self, pagefunc, pagecount, pagesize):
9c44d242 4263 self._pagecount = pagecount
7be9ccff 4264 PagedList.__init__(self, pagefunc, pagesize, True)
9c44d242 4265
7be9ccff 4266 def _getslice(self, start, end):
9c44d242
PH
4267 start_page = start // self._pagesize
4268 end_page = (
4269 self._pagecount if end is None else (end // self._pagesize + 1))
4270 skip_elems = start - start_page * self._pagesize
4271 only_more = None if end is None else end - start
4272 for pagenum in range(start_page, end_page):
7be9ccff 4273 page_results = self.getpage(pagenum)
9c44d242 4274 if skip_elems:
7be9ccff 4275 page_results = page_results[skip_elems:]
9c44d242
PH
4276 skip_elems = None
4277 if only_more is not None:
7be9ccff 4278 if len(page_results) < only_more:
4279 only_more -= len(page_results)
9c44d242 4280 else:
7be9ccff 4281 yield from page_results[:only_more]
9c44d242 4282 break
7be9ccff 4283 yield from page_results
9c44d242
PH
4284
4285
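# Usage sketch (illustrative only): a made-up page function showing that
# OnDemandPagedList only fetches the pages a slice actually touches (the page
# size of 5 is arbitrary).
from yt_dlp.utils import OnDemandPagedList

def fetch_page(pagenum):
    # stand-in for a network request; page n holds entries n*5 .. n*5+4
    return list(range(pagenum * 5, pagenum * 5 + 5))

videos = OnDemandPagedList(fetch_page, 5)
videos.getslice(3, 7)   # -> [3, 4, 5, 6]; only pages 0 and 1 were requested
videos[12]              # -> 12; single lookups go through getslice(idx, idx + 1)
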
81c2f20b 4286def uppercase_escape(s):
676eb3f2 4287 unicode_escape = codecs.getdecoder('unicode_escape')
81c2f20b 4288 return re.sub(
a612753d 4289 r'\\U[0-9a-fA-F]{8}',
676eb3f2
PH
4290 lambda m: unicode_escape(m.group(0))[0],
4291 s)
0fe2ff78
YCH
4292
4293
4294def lowercase_escape(s):
4295 unicode_escape = codecs.getdecoder('unicode_escape')
4296 return re.sub(
4297 r'\\u[0-9a-fA-F]{4}',
4298 lambda m: unicode_escape(m.group(0))[0],
4299 s)
b53466e1 4300
d05cfe06
S
4301
4302def escape_rfc3986(s):
4303 """Escape non-ASCII characters as suggested by RFC 3986"""
8f9312c3 4304 if sys.version_info < (3, 0) and isinstance(s, compat_str):
d05cfe06 4305 s = s.encode('utf-8')
ecc0c5ee 4306 return compat_urllib_parse.quote(s, b"%/;:@&=+$,!~*'()?#[]")
d05cfe06
S
4307
4308
4309def escape_url(url):
4310 """Escape URL as suggested by RFC 3986"""
4311 url_parsed = compat_urllib_parse_urlparse(url)
4312 return url_parsed._replace(
efbed08d 4313 netloc=url_parsed.netloc.encode('idna').decode('ascii'),
d05cfe06
S
4314 path=escape_rfc3986(url_parsed.path),
4315 params=escape_rfc3986(url_parsed.params),
4316 query=escape_rfc3986(url_parsed.query),
4317 fragment=escape_rfc3986(url_parsed.fragment)
4318 ).geturl()
4319
62e609ab 4320
4dfbf869 4321def parse_qs(url):
4322 return compat_parse_qs(compat_urllib_parse_urlparse(url).query)
4323
4324
62e609ab
PH
4325def read_batch_urls(batch_fd):
4326 def fixup(url):
4327 if not isinstance(url, compat_str):
4328 url = url.decode('utf-8', 'replace')
8c04f0be 4329 BOM_UTF8 = ('\xef\xbb\xbf', '\ufeff')
4330 for bom in BOM_UTF8:
4331 if url.startswith(bom):
4332 url = url[len(bom):]
4333 url = url.lstrip()
4334 if not url or url.startswith(('#', ';', ']')):
62e609ab 4335 return False
8c04f0be 4336 # "#" cannot be stripped out since it is part of the URI
4337 # However, it can be safely stripped out if following a whitespace
4338 return re.split(r'\s#', url, 1)[0].rstrip()
62e609ab
PH
4339
4340 with contextlib.closing(batch_fd) as fd:
4341 return [url for url in map(fixup, fd) if url]
b74fa8cd
JMF
4342
4343
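# Usage sketch (illustrative only, fabricated URLs): read_batch_urls() accepts
# any file-like object; comments, blank lines and trailing " #" notes are dropped.
import io
from yt_dlp.utils import read_batch_urls

batch = io.StringIO(
    '# a comment line\n'
    'https://example.com/video1\n'
    '\n'
    'https://example.com/video2  # trailing note\n')
read_batch_urls(batch)   # -> ['https://example.com/video1', 'https://example.com/video2']
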
4344def urlencode_postdata(*args, **kargs):
15707c7e 4345 return compat_urllib_parse_urlencode(*args, **kargs).encode('ascii')
bcf89ce6
PH
4346
4347
38f9ef31 4348def update_url_query(url, query):
cacd9966
YCH
4349 if not query:
4350 return url
38f9ef31 4351 parsed_url = compat_urlparse.urlparse(url)
4352 qs = compat_parse_qs(parsed_url.query)
4353 qs.update(query)
4354 return compat_urlparse.urlunparse(parsed_url._replace(
15707c7e 4355 query=compat_urllib_parse_urlencode(qs, True)))
16392824 4356
8e60dc75 4357
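# Usage sketch (illustrative only, made-up URL): update_url_query() merges the
# given parameters into the existing query string, overwriting duplicates.
from yt_dlp.utils import update_url_query

update_url_query('https://example.com/api?page=1', {'page': 2, 'count': 10})
# -> 'https://example.com/api?page=2&count=10'
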
ed0291d1
S
4358def update_Request(req, url=None, data=None, headers={}, query={}):
4359 req_headers = req.headers.copy()
4360 req_headers.update(headers)
4361 req_data = data or req.data
4362 req_url = update_url_query(url or req.get_full_url(), query)
95cf60e8
S
4363 req_get_method = req.get_method()
4364 if req_get_method == 'HEAD':
4365 req_type = HEADRequest
4366 elif req_get_method == 'PUT':
4367 req_type = PUTRequest
4368 else:
4369 req_type = compat_urllib_request.Request
ed0291d1
S
4370 new_req = req_type(
4371 req_url, data=req_data, headers=req_headers,
4372 origin_req_host=req.origin_req_host, unverifiable=req.unverifiable)
4373 if hasattr(req, 'timeout'):
4374 new_req.timeout = req.timeout
4375 return new_req
4376
4377
10c87c15 4378def _multipart_encode_impl(data, boundary):
0c265486
YCH
4379 content_type = 'multipart/form-data; boundary=%s' % boundary
4380
4381 out = b''
4382 for k, v in data.items():
4383 out += b'--' + boundary.encode('ascii') + b'\r\n'
4384 if isinstance(k, compat_str):
4385 k = k.encode('utf-8')
4386 if isinstance(v, compat_str):
4387 v = v.encode('utf-8')
4388 # RFC 2047 requires non-ASCII field names to be encoded, while RFC 7578
4389 # suggests sending UTF-8 directly. Firefox sends UTF-8, too
b2ad479d 4390 content = b'Content-Disposition: form-data; name="' + k + b'"\r\n\r\n' + v + b'\r\n'
0c265486
YCH
4391 if boundary.encode('ascii') in content:
4392 raise ValueError('Boundary overlaps with data')
4393 out += content
4394
4395 out += b'--' + boundary.encode('ascii') + b'--\r\n'
4396
4397 return out, content_type
4398
4399
4400def multipart_encode(data, boundary=None):
4401 '''
4402 Encode a dict to RFC 7578-compliant form-data
4403
4404 data:
4405 A dict where keys and values can be either Unicode or bytes-like
4406 objects.
4407 boundary:
4408 If specified a Unicode object, it's used as the boundary. Otherwise
4409 a random boundary is generated.
4410
4411 Reference: https://tools.ietf.org/html/rfc7578
4412 '''
4413 has_specified_boundary = boundary is not None
4414
4415 while True:
4416 if boundary is None:
4417 boundary = '---------------' + str(random.randrange(0x0fffffff, 0xffffffff))
4418
4419 try:
10c87c15 4420 out, content_type = _multipart_encode_impl(data, boundary)
0c265486
YCH
4421 break
4422 except ValueError:
4423 if has_specified_boundary:
4424 raise
4425 boundary = None
4426
4427 return out, content_type
4428
4429
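# Usage sketch (illustrative only, invented form fields): multipart_encode()
# with an explicit boundary, which keeps the output deterministic (otherwise a
# random boundary is generated).
from yt_dlp.utils import multipart_encode

body, content_type = multipart_encode(
    {'username': 'alice', 'file': b'raw bytes'}, boundary='----yt-dlp-example')
content_type   # -> 'multipart/form-data; boundary=----yt-dlp-example'
body           # -> the RFC 7578 body: one boundary-delimited part per field
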
86296ad2 4430def dict_get(d, key_or_keys, default=None, skip_false_values=True):
cbecc9b9
S
4431 if isinstance(key_or_keys, (list, tuple)):
4432 for key in key_or_keys:
86296ad2
S
4433 if key not in d or d[key] is None or skip_false_values and not d[key]:
4434 continue
4435 return d[key]
cbecc9b9
S
4436 return default
4437 return d.get(key_or_keys, default)
4438
4439
329ca3be 4440def try_get(src, getter, expected_type=None):
6606817a 4441 for get in variadic(getter):
a32a9a7e
S
4442 try:
4443 v = get(src)
4444 except (AttributeError, KeyError, TypeError, IndexError):
4445 pass
4446 else:
4447 if expected_type is None or isinstance(v, expected_type):
4448 return v
329ca3be
S
4449
4450
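# Usage sketch (illustrative only, fabricated metadata): dict_get() and
# try_get() are how extractor code digs values out of scraped JSON without raising.
from yt_dlp.utils import dict_get, try_get

meta = {'title': '', 'fulltitle': 'An example', 'stats': {'views': '42'}}

dict_get(meta, ('title', 'fulltitle'))              # -> 'An example' ('' is skipped)
try_get(meta, lambda x: x['stats']['views'], str)   # -> '42'
try_get(meta, lambda x: x['stats']['likes'], int)   # -> None (KeyError is swallowed)
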
6cc62232
S
4451def merge_dicts(*dicts):
4452 merged = {}
4453 for a_dict in dicts:
4454 for k, v in a_dict.items():
4455 if v is None:
4456 continue
3089bc74
S
4457 if (k not in merged
4458 or (isinstance(v, compat_str) and v
4459 and isinstance(merged[k], compat_str)
4460 and not merged[k])):
6cc62232
S
4461 merged[k] = v
4462 return merged
4463
4464
8e60dc75
S
4465def encode_compat_str(string, encoding=preferredencoding(), errors='strict'):
4466 return string if isinstance(string, compat_str) else compat_str(string, encoding, errors)
4467
16392824 4468
a1a530b0
PH
4469US_RATINGS = {
4470 'G': 0,
4471 'PG': 10,
4472 'PG-13': 13,
4473 'R': 16,
4474 'NC': 18,
4475}
fac55558
PH
4476
4477
a8795327 4478TV_PARENTAL_GUIDELINES = {
5a16c9d9
RA
4479 'TV-Y': 0,
4480 'TV-Y7': 7,
4481 'TV-G': 0,
4482 'TV-PG': 0,
4483 'TV-14': 14,
4484 'TV-MA': 17,
a8795327
S
4485}
4486
4487
146c80e2 4488def parse_age_limit(s):
a8795327
S
4489 if type(s) == int:
4490 return s if 0 <= s <= 21 else None
4491 if not isinstance(s, compat_basestring):
d838b1bd 4492 return None
146c80e2 4493 m = re.match(r'^(?P<age>\d{1,2})\+?$', s)
a8795327
S
4494 if m:
4495 return int(m.group('age'))
5c5fae6d 4496 s = s.upper()
a8795327
S
4497 if s in US_RATINGS:
4498 return US_RATINGS[s]
5a16c9d9 4499 m = re.match(r'^TV[_-]?(%s)$' % '|'.join(k[3:] for k in TV_PARENTAL_GUIDELINES), s)
b8361187 4500 if m:
5a16c9d9 4501 return TV_PARENTAL_GUIDELINES['TV-' + m.group(1)]
b8361187 4502 return None
146c80e2
S
4503
4504
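# Usage sketch (illustrative only): parse_age_limit() maps several rating
# conventions onto a single integer age.
from yt_dlp.utils import parse_age_limit

parse_age_limit(18)        # -> 18   (ints within 0..21 pass through)
parse_age_limit('16+')     # -> 16
parse_age_limit('PG-13')   # -> 13   (US movie ratings table)
parse_age_limit('TV-MA')   # -> 17   (TV parental guidelines table)
parse_age_limit('NR')      # -> None (unrecognized)
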
fac55558 4505def strip_jsonp(code):
609a61e3 4506 return re.sub(
5552c9eb 4507 r'''(?sx)^
e9c671d5 4508 (?:window\.)?(?P<func_name>[a-zA-Z0-9_.$]*)
5552c9eb
YCH
4509 (?:\s*&&\s*(?P=func_name))?
4510 \s*\(\s*(?P<callback_data>.*)\);?
4511 \s*?(?://[^\n]*)*$''',
4512 r'\g<callback_data>', code)
478c2c61
PH
4513
4514
5c610515 4515def js_to_json(code, vars={}):
4516 # vars is a dict of var, val pairs to substitute
c843e685 4517 COMMENT_RE = r'/\*(?:(?!\*/).)*?\*/|//[^\n]*\n'
4195096e
S
4518 SKIP_RE = r'\s*(?:{comment})?\s*'.format(comment=COMMENT_RE)
4519 INTEGER_TABLE = (
4520 (r'(?s)^(0[xX][0-9a-fA-F]+){skip}:?$'.format(skip=SKIP_RE), 16),
4521 (r'(?s)^(0+[0-7]+){skip}:?$'.format(skip=SKIP_RE), 8),
4522 )
4523
e05f6939 4524 def fix_kv(m):
e7b6d122
PH
4525 v = m.group(0)
4526 if v in ('true', 'false', 'null'):
4527 return v
421ddcb8
C
4528 elif v in ('undefined', 'void 0'):
4529 return 'null'
8bdd16b4 4530 elif v.startswith('/*') or v.startswith('//') or v.startswith('!') or v == ',':
bd1e4844 4531 return ""
4532
4533 if v[0] in ("'", '"'):
4534 v = re.sub(r'(?s)\\.|"', lambda m: {
e7b6d122 4535 '"': '\\"',
bd1e4844 4536 "\\'": "'",
4537 '\\\n': '',
4538 '\\x': '\\u00',
4539 }.get(m.group(0), m.group(0)), v[1:-1])
8bdd16b4 4540 else:
4541 for regex, base in INTEGER_TABLE:
4542 im = re.match(regex, v)
4543 if im:
4544 i = int(im.group(1), base)
4545 return '"%d":' % i if v.endswith(':') else '%d' % i
89ac4a19 4546
5c610515 4547 if v in vars:
4548 return vars[v]
4549
e7b6d122 4550 return '"%s"' % v
e05f6939 4551
bd1e4844 4552 return re.sub(r'''(?sx)
4553 "(?:[^"\\]*(?:\\\\|\\['"nurtbfx/\n]))*[^"\\]*"|
4554 '(?:[^'\\]*(?:\\\\|\\['"nurtbfx/\n]))*[^'\\]*'|
4195096e 4555 {comment}|,(?={skip}[\]}}])|
421ddcb8 4556 void\s0|(?:(?<![0-9])[eE]|[a-df-zA-DF-Z_$])[.a-zA-Z_$0-9]*|
4195096e 4557 \b(?:0[xX][0-9a-fA-F]+|0+[0-7]+)(?:{skip}:)?|
8bdd16b4 4558 [0-9]+(?={skip}:)|
4559 !+
4195096e 4560 '''.format(comment=COMMENT_RE, skip=SKIP_RE), fix_kv, code)
e05f6939
PH
4561
4562
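# Usage sketch (illustrative only): js_to_json() rewrites a JavaScript object
# literal just enough for json.loads(); the literal and the videoId substitution
# below are made up.
import json
from yt_dlp.utils import js_to_json

js = "{title: 'Example', views: 0x10, id: videoId, tags: ['a', 'b',]}"
js_to_json(js, vars={'videoId': '"abc123"'})
# -> '{"title": "Example", "views": 16, "id": "abc123", "tags": ["a", "b"]}'
json.loads(js_to_json(js, vars={'videoId': '"abc123"'}))['views']   # -> 16
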
478c2c61
PH
4563def qualities(quality_ids):
4564 """ Get a numeric quality value out of a list of possible values """
4565 def q(qid):
4566 try:
4567 return quality_ids.index(qid)
4568 except ValueError:
4569 return -1
4570 return q
4571
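# Usage sketch (illustrative only): qualities() turns an ordered preference list
# into a comparable rank, typically fed into a format's "quality" field.
from yt_dlp.utils import qualities

quality = qualities(['240p', '360p', '720p', '1080p'])
quality('720p')    # -> 2   (index in the preference list)
quality('4320p')   # -> -1  (unknown values rank lowest)
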
acd69589 4572
de6000d9 4573DEFAULT_OUTTMPL = {
4574 'default': '%(title)s [%(id)s].%(ext)s',
72755351 4575 'chapter': '%(title)s - %(section_number)03d %(section_title)s [%(id)s].%(ext)s',
de6000d9 4576}
4577OUTTMPL_TYPES = {
72755351 4578 'chapter': None,
de6000d9 4579 'subtitle': None,
4580 'thumbnail': None,
4581 'description': 'description',
4582 'annotation': 'annotations.xml',
4583 'infojson': 'info.json',
08438d2c 4584 'link': None,
5112f26a 4585 'pl_thumbnail': None,
de6000d9 4586 'pl_description': 'description',
4587 'pl_infojson': 'info.json',
4588}
0a871f68 4589
143db31d 4590# As of [1], the format syntax is:
4591# %[mapping_key][conversion_flags][minimum_width][.precision][length_modifier]type
4592# 1. https://docs.python.org/2/library/stdtypes.html#string-formatting
901130bb 4593STR_FORMAT_RE_TMPL = r'''(?x)
4594 (?<!%)(?P<prefix>(?:%%)*)
143db31d 4595 %
524e2e4f 4596 (?P<has_key>\((?P<key>{0})\))?
752cda38 4597 (?P<format>
524e2e4f 4598 (?P<conversion>[#0\-+ ]+)?
4599 (?P<min_width>\d+)?
4600 (?P<precision>\.\d+)?
4601 (?P<len_mod>[hlL])? # unused in python
901130bb 4602 {1} # conversion type
752cda38 4603 )
143db31d 4604'''
4605
7d1eb38a 4606
901130bb 4607STR_FORMAT_TYPES = 'diouxXeEfFgGcrs'
a020a0dc 4608
7d1eb38a 4609
a020a0dc
PH
4610def limit_length(s, length):
4611 """ Add ellipses to overly long strings """
4612 if s is None:
4613 return None
4614 ELLIPSES = '...'
4615 if len(s) > length:
4616 return s[:length - len(ELLIPSES)] + ELLIPSES
4617 return s
48844745
PH
4618
4619
4620def version_tuple(v):
5f9b8394 4621 return tuple(int(e) for e in re.split(r'[-.]', v))
48844745
PH
4622
4623
4624def is_outdated_version(version, limit, assume_new=True):
4625 if not version:
4626 return not assume_new
4627 try:
4628 return version_tuple(version) < version_tuple(limit)
4629 except ValueError:
4630 return not assume_new
732ea2f0
PH
4631
4632
4633def ytdl_is_updateable():
7a5c1cfe 4634 """ Returns if yt-dlp can be updated with -U """
735d865e 4635
5d535b4a 4636 from .update import is_non_updateable
732ea2f0 4637
5d535b4a 4638 return not is_non_updateable()
7d4111ed
PH
4639
4640
4641def args_to_str(args):
4642 # Get a short string representation for a subprocess command
702ccf2d 4643 return ' '.join(compat_shlex_quote(a) for a in args)
2ccd1b10
PH
4644
4645
9b9c5355 4646def error_to_compat_str(err):
fdae2358
S
4647 err_str = str(err)
4648 # On python 2 error byte string must be decoded with proper
4649 # encoding rather than ascii
4650 if sys.version_info[0] < 3:
4651 err_str = err_str.decode(preferredencoding())
4652 return err_str
4653
4654
c460bdd5 4655def mimetype2ext(mt):
eb9ee194
S
4656 if mt is None:
4657 return None
4658
9359f3d4
F
4659 mt, _, params = mt.partition(';')
4660 mt = mt.strip()
4661
4662 FULL_MAP = {
765ac263 4663 'audio/mp4': 'm4a',
6c33d24b
YCH
4664 # Per RFC 3003, audio/mpeg can be .mp1, .mp2 or .mp3. Here use .mp3 as
4665 # it's the most popular one
4666 'audio/mpeg': 'mp3',
ba39289d 4667 'audio/x-wav': 'wav',
9359f3d4
F
4668 'audio/wav': 'wav',
4669 'audio/wave': 'wav',
4670 }
4671
4672 ext = FULL_MAP.get(mt)
765ac263
JMF
4673 if ext is not None:
4674 return ext
4675
9359f3d4 4676 SUBTYPE_MAP = {
f6861ec9 4677 '3gpp': '3gp',
cafcf657 4678 'smptett+xml': 'tt',
cafcf657 4679 'ttaf+xml': 'dfxp',
a0d8d704 4680 'ttml+xml': 'ttml',
f6861ec9 4681 'x-flv': 'flv',
a0d8d704 4682 'x-mp4-fragmented': 'mp4',
d4f05d47 4683 'x-ms-sami': 'sami',
a0d8d704 4684 'x-ms-wmv': 'wmv',
b4173f15
RA
4685 'mpegurl': 'm3u8',
4686 'x-mpegurl': 'm3u8',
4687 'vnd.apple.mpegurl': 'm3u8',
4688 'dash+xml': 'mpd',
b4173f15 4689 'f4m+xml': 'f4m',
f164b971 4690 'hds+xml': 'f4m',
e910fe2f 4691 'vnd.ms-sstr+xml': 'ism',
c2b2c7e1 4692 'quicktime': 'mov',
98ce1a3f 4693 'mp2t': 'ts',
39e7107d 4694 'x-wav': 'wav',
9359f3d4
F
4695 'filmstrip+json': 'fs',
4696 'svg+xml': 'svg',
4697 }
4698
4699 _, _, subtype = mt.rpartition('/')
4700 ext = SUBTYPE_MAP.get(subtype.lower())
4701 if ext is not None:
4702 return ext
4703
4704 SUFFIX_MAP = {
4705 'json': 'json',
4706 'xml': 'xml',
4707 'zip': 'zip',
4708 'gzip': 'gz',
4709 }
4710
4711 _, _, suffix = subtype.partition('+')
4712 ext = SUFFIX_MAP.get(suffix)
4713 if ext is not None:
4714 return ext
4715
4716 return subtype.replace('+', '.')
c460bdd5
PH
4717
4718
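# Usage sketch (illustrative only): mimetype2ext() consults the full-type,
# subtype and suffix tables above, in that order.
from yt_dlp.utils import mimetype2ext

mimetype2ext('audio/mp4')                          # -> 'm4a'  (full-type table)
mimetype2ext('application/x-mpegURL')              # -> 'm3u8' (subtype, case-insensitive)
mimetype2ext('video/mp2t; codecs="avc1.4d401e"')   # -> 'ts'   (parameters stripped first)
mimetype2ext('application/vnd.something+json')     # -> 'json' (suffix table)
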
2814f12b
THD
4719def ext2mimetype(ext_or_url):
4720 if not ext_or_url:
4721 return None
4722 if '.' not in ext_or_url:
4723 ext_or_url = f'file.{ext_or_url}'
4724 return mimetypes.guess_type(ext_or_url)[0]
4725
4726
4f3c5e06 4727def parse_codecs(codecs_str):
4728 # http://tools.ietf.org/html/rfc6381
4729 if not codecs_str:
4730 return {}
a0566bbf 4731 split_codecs = list(filter(None, map(
dbf5416a 4732 str.strip, codecs_str.strip().strip(',').split(','))))
176f1866 4733 vcodec, acodec, hdr = None, None, None
a0566bbf 4734 for full_codec in split_codecs:
9bd979ca 4735 parts = full_codec.split('.')
4736 codec = parts[0].replace('0', '')
4737 if codec in ('avc1', 'avc2', 'avc3', 'avc4', 'vp9', 'vp8', 'hev1', 'hev2',
4738 'h263', 'h264', 'mp4v', 'hvc1', 'av1', 'theora', 'dvh1', 'dvhe'):
4f3c5e06 4739 if not vcodec:
9bd979ca 4740 vcodec = '.'.join(parts[:4]) if codec in ('vp9', 'av1') else full_codec
176f1866 4741 if codec in ('dvh1', 'dvhe'):
4742 hdr = 'DV'
9bd979ca 4743 elif codec == 'av1' and len(parts) > 3 and parts[3] == '10':
4744 hdr = 'HDR10'
4745 elif full_codec.replace('0', '').startswith('vp9.2'):
176f1866 4746 hdr = 'HDR10'
60f5c9fb 4747 elif codec in ('mp4a', 'opus', 'vorbis', 'mp3', 'aac', 'ac-3', 'ec-3', 'eac3', 'dtsc', 'dtse', 'dtsh', 'dtsl'):
4f3c5e06 4748 if not acodec:
4749 acodec = full_codec
4750 else:
60f5c9fb 4751 write_string('WARNING: Unknown codec %s\n' % full_codec, sys.stderr)
4f3c5e06 4752 if not vcodec and not acodec:
a0566bbf 4753 if len(split_codecs) == 2:
4f3c5e06 4754 return {
a0566bbf 4755 'vcodec': split_codecs[0],
4756 'acodec': split_codecs[1],
4f3c5e06 4757 }
4758 else:
4759 return {
4760 'vcodec': vcodec or 'none',
4761 'acodec': acodec or 'none',
176f1866 4762 'dynamic_range': hdr,
4f3c5e06 4763 }
4764 return {}
4765
4766
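# Usage sketch (illustrative only, typical but invented values): parse_codecs()
# splits an RFC 6381 codecs attribute into video/audio fields plus a detected
# dynamic range.
from yt_dlp.utils import parse_codecs

parse_codecs('avc1.64001F, mp4a.40.2')
# -> {'vcodec': 'avc1.64001F', 'acodec': 'mp4a.40.2', 'dynamic_range': None}
parse_codecs('dvh1.05.06')
# -> {'vcodec': 'dvh1.05.06', 'acodec': 'none', 'dynamic_range': 'DV'}
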
2ccd1b10 4767def urlhandle_detect_ext(url_handle):
79298173 4768 getheader = url_handle.headers.get
2ccd1b10 4769
b55ee18f
PH
4770 cd = getheader('Content-Disposition')
4771 if cd:
4772 m = re.match(r'attachment;\s*filename="(?P<filename>[^"]+)"', cd)
4773 if m:
4774 e = determine_ext(m.group('filename'), default_ext=None)
4775 if e:
4776 return e
4777
c460bdd5 4778 return mimetype2ext(getheader('Content-Type'))
05900629
PH
4779
4780
1e399778
YCH
4781def encode_data_uri(data, mime_type):
4782 return 'data:%s;base64,%s' % (mime_type, base64.b64encode(data).decode('ascii'))
4783
4784
05900629 4785def age_restricted(content_limit, age_limit):
6ec6cb4e 4786 """ Returns True iff the content should be blocked """
05900629
PH
4787
4788 if age_limit is None: # No limit set
4789 return False
4790 if content_limit is None:
4791 return False # Content available for everyone
4792 return age_limit < content_limit
61ca9a80
PH
4793
4794
4795def is_html(first_bytes):
4796 """ Detect whether a file contains HTML by examining its first bytes. """
4797
4798 BOMS = [
4799 (b'\xef\xbb\xbf', 'utf-8'),
4800 (b'\x00\x00\xfe\xff', 'utf-32-be'),
4801 (b'\xff\xfe\x00\x00', 'utf-32-le'),
4802 (b'\xff\xfe', 'utf-16-le'),
4803 (b'\xfe\xff', 'utf-16-be'),
4804 ]
4805 for bom, enc in BOMS:
4806 if first_bytes.startswith(bom):
4807 s = first_bytes[len(bom):].decode(enc, 'replace')
4808 break
4809 else:
4810 s = first_bytes.decode('utf-8', 'replace')
4811
4812 return re.match(r'^\s*<', s)
a055469f
PH
4813
4814
4815def determine_protocol(info_dict):
4816 protocol = info_dict.get('protocol')
4817 if protocol is not None:
4818 return protocol
4819
7de837a5 4820 url = sanitize_url(info_dict['url'])
a055469f
PH
4821 if url.startswith('rtmp'):
4822 return 'rtmp'
4823 elif url.startswith('mms'):
4824 return 'mms'
4825 elif url.startswith('rtsp'):
4826 return 'rtsp'
4827
4828 ext = determine_ext(url)
4829 if ext == 'm3u8':
4830 return 'm3u8'
4831 elif ext == 'f4m':
4832 return 'f4m'
4833
4834 return compat_urllib_parse_urlparse(url).scheme
cfb56d1a
PH
4835
4836
c5e3f849 4837def render_table(header_row, data, delim=False, extra_gap=0, hide_empty=False):
4838 """ Render a list of rows, each as a list of values.
4839 Text after a \t will be right aligned """
ec11a9f4 4840 def width(string):
c5e3f849 4841 return len(remove_terminal_sequences(string).replace('\t', ''))
76d321f6 4842
4843 def get_max_lens(table):
ec11a9f4 4844 return [max(width(str(v)) for v in col) for col in zip(*table)]
76d321f6 4845
4846 def filter_using_list(row, filterArray):
4847 return [col for (take, col) in zip(filterArray, row) if take]
4848
c5e3f849 4849 if hide_empty:
76d321f6 4850 max_lens = get_max_lens(data)
4851 header_row = filter_using_list(header_row, max_lens)
4852 data = [filter_using_list(row, max_lens) for row in data]
4853
cfb56d1a 4854 table = [header_row] + data
76d321f6 4855 max_lens = get_max_lens(table)
c5e3f849 4856 extra_gap += 1
76d321f6 4857 if delim:
c5e3f849 4858 table = [header_row, [delim * (ml + extra_gap) for ml in max_lens]] + data
4859 table[1][-1] = table[1][-1][:-extra_gap] # Remove extra_gap from end of delimiter
ec11a9f4 4860 for row in table:
4861 for pos, text in enumerate(map(str, row)):
c5e3f849 4862 if '\t' in text:
4863 row[pos] = text.replace('\t', ' ' * (max_lens[pos] - width(text))) + ' ' * extra_gap
4864 else:
4865 row[pos] = text + ' ' * (max_lens[pos] - width(text) + extra_gap)
4866 ret = '\n'.join(''.join(row).rstrip() for row in table)
ec11a9f4 4867 return ret
347de493
PH
4868
4869
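# Usage sketch (illustrative only, invented rows): render_table() is what backs
# --list-formats style output; a leading "\t" in a cell right-aligns it within
# its column.
from yt_dlp.utils import render_table

header = ['ID', 'EXT', '\tFILESIZE']
rows = [['137', 'mp4', '\t10.50MiB'],
        ['22', 'mp4', '\t5.20MiB']]
print(render_table(header, rows))   # prints an aligned, plain-text table
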
8f18aca8 4870def _match_one(filter_part, dct, incomplete):
77b87f05 4871 # TODO: Generalize code with YoutubeDL._build_format_filter
a047eeb6 4872 STRING_OPERATORS = {
4873 '*=': operator.contains,
4874 '^=': lambda attr, value: attr.startswith(value),
4875 '$=': lambda attr, value: attr.endswith(value),
4876 '~=': lambda attr, value: re.search(value, attr),
4877 }
347de493 4878 COMPARISON_OPERATORS = {
a047eeb6 4879 **STRING_OPERATORS,
4880 '<=': operator.le, # "<=" must be defined above "<"
347de493 4881 '<': operator.lt,
347de493 4882 '>=': operator.ge,
a047eeb6 4883 '>': operator.gt,
347de493 4884 '=': operator.eq,
347de493 4885 }
a047eeb6 4886
347de493
PH
4887 operator_rex = re.compile(r'''(?x)\s*
4888 (?P<key>[a-z_]+)
77b87f05 4889 \s*(?P<negation>!\s*)?(?P<op>%s)(?P<none_inclusive>\s*\?)?\s*
347de493 4890 (?:
a047eeb6 4891 (?P<quote>["\'])(?P<quotedstrval>.+?)(?P=quote)|
4892 (?P<strval>.+?)
347de493
PH
4893 )
4894 \s*$
4895 ''' % '|'.join(map(re.escape, COMPARISON_OPERATORS.keys())))
4896 m = operator_rex.search(filter_part)
4897 if m:
18f96d12 4898 m = m.groupdict()
4899 unnegated_op = COMPARISON_OPERATORS[m['op']]
4900 if m['negation']:
77b87f05
MT
4901 op = lambda attr, value: not unnegated_op(attr, value)
4902 else:
4903 op = unnegated_op
18f96d12 4904 comparison_value = m['quotedstrval'] or m['strval'] or m['intval']
4905 if m['quote']:
4906 comparison_value = comparison_value.replace(r'\%s' % m['quote'], m['quote'])
4907 actual_value = dct.get(m['key'])
4908 numeric_comparison = None
4909 if isinstance(actual_value, compat_numeric_types):
e5a088dc
S
4910 # If the original field is a string and the matching comparison value is
4911 # a number, we should respect the origin of the original field
4912 # and process comparison value as a string (see
18f96d12 4913 # https://github.com/ytdl-org/youtube-dl/issues/11082)
347de493 4914 try:
18f96d12 4915 numeric_comparison = int(comparison_value)
347de493 4916 except ValueError:
18f96d12 4917 numeric_comparison = parse_filesize(comparison_value)
4918 if numeric_comparison is None:
4919 numeric_comparison = parse_filesize(f'{comparison_value}B')
4920 if numeric_comparison is None:
4921 numeric_comparison = parse_duration(comparison_value)
4922 if numeric_comparison is not None and m['op'] in STRING_OPERATORS:
4923 raise ValueError('Operator %s only supports string values!' % m['op'])
347de493 4924 if actual_value is None:
18f96d12 4925 return incomplete or m['none_inclusive']
4926 return op(actual_value, comparison_value if numeric_comparison is None else numeric_comparison)
347de493
PH
4927
4928 UNARY_OPERATORS = {
1cc47c66
S
4929 '': lambda v: (v is True) if isinstance(v, bool) else (v is not None),
4930 '!': lambda v: (v is False) if isinstance(v, bool) else (v is None),
347de493
PH
4931 }
4932 operator_rex = re.compile(r'''(?x)\s*
4933 (?P<op>%s)\s*(?P<key>[a-z_]+)
4934 \s*$
4935 ''' % '|'.join(map(re.escape, UNARY_OPERATORS.keys())))
4936 m = operator_rex.search(filter_part)
4937 if m:
4938 op = UNARY_OPERATORS[m.group('op')]
4939 actual_value = dct.get(m.group('key'))
8f18aca8 4940 if incomplete and actual_value is None:
4941 return True
347de493
PH
4942 return op(actual_value)
4943
4944 raise ValueError('Invalid filter part %r' % filter_part)
4945
4946
8f18aca8 4947def match_str(filter_str, dct, incomplete=False):
4948 """ Filter a dictionary with a simple string syntax. Returns True (=passes filter) or false
4949 When incomplete, all conditions passes on missing fields
4950 """
347de493 4951 return all(
8f18aca8 4952 _match_one(filter_part.replace(r'\&', '&'), dct, incomplete)
a047eeb6 4953 for filter_part in re.split(r'(?<!\\)&', filter_str))
347de493
PH
4954
4955
4956def match_filter_func(filter_str):
8f18aca8 4957 def _match_func(info_dict, *args, **kwargs):
4958 if match_str(filter_str, info_dict, *args, **kwargs):
347de493
PH
4959 return None
4960 else:
4961 video_title = info_dict.get('title', info_dict.get('id', 'video'))
4962 return '%s does not pass filter %s, skipping ..' % (video_title, filter_str)
4963 return _match_func
91410c9b
PH
4964
4965
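# Usage sketch (illustrative only, fabricated info dict): match_str() implements
# the same filter mini-language as --match-filter.
from yt_dlp.utils import match_str

info = {'duration': 340, 'like_count': 100, 'description': 'An example', 'is_live': False}

match_str('duration > 300', info)                    # -> True
match_str('like_count >= 1000', info)                # -> False
match_str('description & !is_live', info)            # -> True
match_str('title*=example', info)                    # -> False (field missing)
match_str('title*=example', info, incomplete=True)   # -> True  (missing fields pass)
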
bf6427d2
YCH
4966def parse_dfxp_time_expr(time_expr):
4967 if not time_expr:
d631d5f9 4968 return
bf6427d2
YCH
4969
4970 mobj = re.match(r'^(?P<time_offset>\d+(?:\.\d+)?)s?$', time_expr)
4971 if mobj:
4972 return float(mobj.group('time_offset'))
4973
db2fe38b 4974 mobj = re.match(r'^(\d+):(\d\d):(\d\d(?:(?:\.|:)\d+)?)$', time_expr)
bf6427d2 4975 if mobj:
db2fe38b 4976 return 3600 * int(mobj.group(1)) + 60 * int(mobj.group(2)) + float(mobj.group(3).replace(':', '.'))
bf6427d2
YCH
4977
4978
c1c924ab 4979def srt_subtitles_timecode(seconds):
aa7785f8 4980 return '%02d:%02d:%02d,%03d' % timetuple_from_msec(seconds * 1000)
4981
4982
4983def ass_subtitles_timecode(seconds):
4984 time = timetuple_from_msec(seconds * 1000)
4985 return '%01d:%02d:%02d.%02d' % (*time[:-1], time.milliseconds / 10)
bf6427d2
YCH
4986
4987
4988def dfxp2srt(dfxp_data):
3869028f
YCH
4989 '''
4990 @param dfxp_data A bytes-like object containing DFXP data
4991 @returns A unicode object containing converted SRT data
4992 '''
5b995f71 4993 LEGACY_NAMESPACES = (
3869028f
YCH
4994 (b'http://www.w3.org/ns/ttml', [
4995 b'http://www.w3.org/2004/11/ttaf1',
4996 b'http://www.w3.org/2006/04/ttaf1',
4997 b'http://www.w3.org/2006/10/ttaf1',
5b995f71 4998 ]),
3869028f
YCH
4999 (b'http://www.w3.org/ns/ttml#styling', [
5000 b'http://www.w3.org/ns/ttml#style',
5b995f71
RA
5001 ]),
5002 )
5003
5004 SUPPORTED_STYLING = [
5005 'color',
5006 'fontFamily',
5007 'fontSize',
5008 'fontStyle',
5009 'fontWeight',
5010 'textDecoration'
5011 ]
5012
4e335771 5013 _x = functools.partial(xpath_with_ns, ns_map={
261f4730 5014 'xml': 'http://www.w3.org/XML/1998/namespace',
4e335771 5015 'ttml': 'http://www.w3.org/ns/ttml',
5b995f71 5016 'tts': 'http://www.w3.org/ns/ttml#styling',
4e335771 5017 })
bf6427d2 5018
5b995f71
RA
5019 styles = {}
5020 default_style = {}
5021
87de7069 5022 class TTMLPElementParser(object):
5b995f71
RA
5023 _out = ''
5024 _unclosed_elements = []
5025 _applied_styles = []
bf6427d2 5026
2b14cb56 5027 def start(self, tag, attrib):
5b995f71
RA
5028 if tag in (_x('ttml:br'), 'br'):
5029 self._out += '\n'
5030 else:
5031 unclosed_elements = []
5032 style = {}
5033 element_style_id = attrib.get('style')
5034 if default_style:
5035 style.update(default_style)
5036 if element_style_id:
5037 style.update(styles.get(element_style_id, {}))
5038 for prop in SUPPORTED_STYLING:
5039 prop_val = attrib.get(_x('tts:' + prop))
5040 if prop_val:
5041 style[prop] = prop_val
5042 if style:
5043 font = ''
5044 for k, v in sorted(style.items()):
5045 if self._applied_styles and self._applied_styles[-1].get(k) == v:
5046 continue
5047 if k == 'color':
5048 font += ' color="%s"' % v
5049 elif k == 'fontSize':
5050 font += ' size="%s"' % v
5051 elif k == 'fontFamily':
5052 font += ' face="%s"' % v
5053 elif k == 'fontWeight' and v == 'bold':
5054 self._out += '<b>'
5055 unclosed_elements.append('b')
5056 elif k == 'fontStyle' and v == 'italic':
5057 self._out += '<i>'
5058 unclosed_elements.append('i')
5059 elif k == 'textDecoration' and v == 'underline':
5060 self._out += '<u>'
5061 unclosed_elements.append('u')
5062 if font:
5063 self._out += '<font' + font + '>'
5064 unclosed_elements.append('font')
5065 applied_style = {}
5066 if self._applied_styles:
5067 applied_style.update(self._applied_styles[-1])
5068 applied_style.update(style)
5069 self._applied_styles.append(applied_style)
5070 self._unclosed_elements.append(unclosed_elements)
bf6427d2 5071
2b14cb56 5072 def end(self, tag):
5b995f71
RA
5073 if tag not in (_x('ttml:br'), 'br'):
5074 unclosed_elements = self._unclosed_elements.pop()
5075 for element in reversed(unclosed_elements):
5076 self._out += '</%s>' % element
5077 if unclosed_elements and self._applied_styles:
5078 self._applied_styles.pop()
bf6427d2 5079
2b14cb56 5080 def data(self, data):
5b995f71 5081 self._out += data
2b14cb56 5082
5083 def close(self):
5b995f71 5084 return self._out.strip()
2b14cb56 5085
5086 def parse_node(node):
5087 target = TTMLPElementParser()
5088 parser = xml.etree.ElementTree.XMLParser(target=target)
5089 parser.feed(xml.etree.ElementTree.tostring(node))
5090 return parser.close()
bf6427d2 5091
5b995f71
RA
5092 for k, v in LEGACY_NAMESPACES:
5093 for ns in v:
5094 dfxp_data = dfxp_data.replace(ns, k)
5095
3869028f 5096 dfxp = compat_etree_fromstring(dfxp_data)
bf6427d2 5097 out = []
5b995f71 5098 paras = dfxp.findall(_x('.//ttml:p')) or dfxp.findall('.//p')
1b0427e6
YCH
5099
5100 if not paras:
5101 raise ValueError('Invalid dfxp/TTML subtitle')
bf6427d2 5102
5b995f71
RA
5103 repeat = False
5104 while True:
5105 for style in dfxp.findall(_x('.//ttml:style')):
261f4730
RA
5106 style_id = style.get('id') or style.get(_x('xml:id'))
5107 if not style_id:
5108 continue
5b995f71
RA
5109 parent_style_id = style.get('style')
5110 if parent_style_id:
5111 if parent_style_id not in styles:
5112 repeat = True
5113 continue
5114 styles[style_id] = styles[parent_style_id].copy()
5115 for prop in SUPPORTED_STYLING:
5116 prop_val = style.get(_x('tts:' + prop))
5117 if prop_val:
5118 styles.setdefault(style_id, {})[prop] = prop_val
5119 if repeat:
5120 repeat = False
5121 else:
5122 break
5123
5124 for p in ('body', 'div'):
5125 ele = xpath_element(dfxp, [_x('.//ttml:' + p), './/' + p])
5126 if ele is None:
5127 continue
5128 style = styles.get(ele.get('style'))
5129 if not style:
5130 continue
5131 default_style.update(style)
5132
bf6427d2 5133 for para, index in zip(paras, itertools.count(1)):
d631d5f9 5134 begin_time = parse_dfxp_time_expr(para.attrib.get('begin'))
7dff0363 5135 end_time = parse_dfxp_time_expr(para.attrib.get('end'))
d631d5f9
YCH
5136 dur = parse_dfxp_time_expr(para.attrib.get('dur'))
5137 if begin_time is None:
5138 continue
7dff0363 5139 if not end_time:
d631d5f9
YCH
5140 if not dur:
5141 continue
5142 end_time = begin_time + dur
bf6427d2
YCH
5143 out.append('%d\n%s --> %s\n%s\n\n' % (
5144 index,
c1c924ab
YCH
5145 srt_subtitles_timecode(begin_time),
5146 srt_subtitles_timecode(end_time),
bf6427d2
YCH
5147 parse_node(para)))
5148
5149 return ''.join(out)
5150
5151
66e289ba
S
5152def cli_option(params, command_option, param):
5153 param = params.get(param)
98e698f1
RA
5154 if param:
5155 param = compat_str(param)
66e289ba
S
5156 return [command_option, param] if param is not None else []
5157
5158
5159def cli_bool_option(params, command_option, param, true_value='true', false_value='false', separator=None):
5160 param = params.get(param)
5b232f46
S
5161 if param is None:
5162 return []
66e289ba
S
5163 assert isinstance(param, bool)
5164 if separator:
5165 return [command_option + separator + (true_value if param else false_value)]
5166 return [command_option, true_value if param else false_value]
5167
5168
5169def cli_valueless_option(params, command_option, param, expected_value=True):
5170 param = params.get(param)
5171 return [command_option] if param == expected_value else []
5172
5173
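# Usage sketch (illustrative only, fabricated params): the three cli_* helpers
# above translate yt-dlp params into arguments for an external program.
from yt_dlp.utils import cli_option, cli_bool_option, cli_valueless_option

params = {'proxy': 'socks5://127.0.0.1:1080', 'nocheckcertificate': True, 'quiet': True}

cli_option(params, '--proxy', 'proxy')
# -> ['--proxy', 'socks5://127.0.0.1:1080']
cli_bool_option(params, '--check-certificate', 'nocheckcertificate', 'false', 'true')
# -> ['--check-certificate', 'false']
cli_bool_option(params, '--no-check-certificate', 'nocheckcertificate', separator='=')
# -> ['--no-check-certificate=true']
cli_valueless_option(params, '--quiet', 'quiet')
# -> ['--quiet']
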
e92caff5 5174def cli_configuration_args(argdict, keys, default=[], use_compat=True):
eab9b2bc 5175 if isinstance(argdict, (list, tuple)): # for backward compatibility
e92caff5 5176 if use_compat:
5b1ecbb3 5177 return argdict
5178 else:
5179 argdict = None
eab9b2bc 5180 if argdict is None:
5b1ecbb3 5181 return default
eab9b2bc 5182 assert isinstance(argdict, dict)
5183
e92caff5 5184 assert isinstance(keys, (list, tuple))
5185 for key_list in keys:
e92caff5 5186 arg_list = list(filter(
5187 lambda x: x is not None,
6606817a 5188 [argdict.get(key.lower()) for key in variadic(key_list)]))
e92caff5 5189 if arg_list:
5190 return [arg for args in arg_list for arg in args]
5191 return default
66e289ba 5192
6251555f 5193
330690a2 5194def _configuration_args(main_key, argdict, exe, keys=None, default=[], use_compat=True):
5195 main_key, exe = main_key.lower(), exe.lower()
5196 root_key = exe if main_key == exe else f'{main_key}+{exe}'
5197 keys = [f'{root_key}{k}' for k in (keys or [''])]
5198 if root_key in keys:
5199 if main_key != exe:
5200 keys.append((main_key, exe))
5201 keys.append('default')
5202 else:
5203 use_compat = False
5204 return cli_configuration_args(argdict, keys, default, use_compat)
5205
66e289ba 5206
39672624
YCH
5207class ISO639Utils(object):
5208 # See http://www.loc.gov/standards/iso639-2/ISO-639-2_utf-8.txt
5209 _lang_map = {
5210 'aa': 'aar',
5211 'ab': 'abk',
5212 'ae': 'ave',
5213 'af': 'afr',
5214 'ak': 'aka',
5215 'am': 'amh',
5216 'an': 'arg',
5217 'ar': 'ara',
5218 'as': 'asm',
5219 'av': 'ava',
5220 'ay': 'aym',
5221 'az': 'aze',
5222 'ba': 'bak',
5223 'be': 'bel',
5224 'bg': 'bul',
5225 'bh': 'bih',
5226 'bi': 'bis',
5227 'bm': 'bam',
5228 'bn': 'ben',
5229 'bo': 'bod',
5230 'br': 'bre',
5231 'bs': 'bos',
5232 'ca': 'cat',
5233 'ce': 'che',
5234 'ch': 'cha',
5235 'co': 'cos',
5236 'cr': 'cre',
5237 'cs': 'ces',
5238 'cu': 'chu',
5239 'cv': 'chv',
5240 'cy': 'cym',
5241 'da': 'dan',
5242 'de': 'deu',
5243 'dv': 'div',
5244 'dz': 'dzo',
5245 'ee': 'ewe',
5246 'el': 'ell',
5247 'en': 'eng',
5248 'eo': 'epo',
5249 'es': 'spa',
5250 'et': 'est',
5251 'eu': 'eus',
5252 'fa': 'fas',
5253 'ff': 'ful',
5254 'fi': 'fin',
5255 'fj': 'fij',
5256 'fo': 'fao',
5257 'fr': 'fra',
5258 'fy': 'fry',
5259 'ga': 'gle',
5260 'gd': 'gla',
5261 'gl': 'glg',
5262 'gn': 'grn',
5263 'gu': 'guj',
5264 'gv': 'glv',
5265 'ha': 'hau',
5266 'he': 'heb',
b7acc835 5267 'iw': 'heb', # Replaced by he in 1989 revision
39672624
YCH
5268 'hi': 'hin',
5269 'ho': 'hmo',
5270 'hr': 'hrv',
5271 'ht': 'hat',
5272 'hu': 'hun',
5273 'hy': 'hye',
5274 'hz': 'her',
5275 'ia': 'ina',
5276 'id': 'ind',
b7acc835 5277 'in': 'ind', # Replaced by id in 1989 revision
39672624
YCH
5278 'ie': 'ile',
5279 'ig': 'ibo',
5280 'ii': 'iii',
5281 'ik': 'ipk',
5282 'io': 'ido',
5283 'is': 'isl',
5284 'it': 'ita',
5285 'iu': 'iku',
5286 'ja': 'jpn',
5287 'jv': 'jav',
5288 'ka': 'kat',
5289 'kg': 'kon',
5290 'ki': 'kik',
5291 'kj': 'kua',
5292 'kk': 'kaz',
5293 'kl': 'kal',
5294 'km': 'khm',
5295 'kn': 'kan',
5296 'ko': 'kor',
5297 'kr': 'kau',
5298 'ks': 'kas',
5299 'ku': 'kur',
5300 'kv': 'kom',
5301 'kw': 'cor',
5302 'ky': 'kir',
5303 'la': 'lat',
5304 'lb': 'ltz',
5305 'lg': 'lug',
5306 'li': 'lim',
5307 'ln': 'lin',
5308 'lo': 'lao',
5309 'lt': 'lit',
5310 'lu': 'lub',
5311 'lv': 'lav',
5312 'mg': 'mlg',
5313 'mh': 'mah',
5314 'mi': 'mri',
5315 'mk': 'mkd',
5316 'ml': 'mal',
5317 'mn': 'mon',
5318 'mr': 'mar',
5319 'ms': 'msa',
5320 'mt': 'mlt',
5321 'my': 'mya',
5322 'na': 'nau',
5323 'nb': 'nob',
5324 'nd': 'nde',
5325 'ne': 'nep',
5326 'ng': 'ndo',
5327 'nl': 'nld',
5328 'nn': 'nno',
5329 'no': 'nor',
5330 'nr': 'nbl',
5331 'nv': 'nav',
5332 'ny': 'nya',
5333 'oc': 'oci',
5334 'oj': 'oji',
5335 'om': 'orm',
5336 'or': 'ori',
5337 'os': 'oss',
5338 'pa': 'pan',
5339 'pi': 'pli',
5340 'pl': 'pol',
5341 'ps': 'pus',
5342 'pt': 'por',
5343 'qu': 'que',
5344 'rm': 'roh',
5345 'rn': 'run',
5346 'ro': 'ron',
5347 'ru': 'rus',
5348 'rw': 'kin',
5349 'sa': 'san',
5350 'sc': 'srd',
5351 'sd': 'snd',
5352 'se': 'sme',
5353 'sg': 'sag',
5354 'si': 'sin',
5355 'sk': 'slk',
5356 'sl': 'slv',
5357 'sm': 'smo',
5358 'sn': 'sna',
5359 'so': 'som',
5360 'sq': 'sqi',
5361 'sr': 'srp',
5362 'ss': 'ssw',
5363 'st': 'sot',
5364 'su': 'sun',
5365 'sv': 'swe',
5366 'sw': 'swa',
5367 'ta': 'tam',
5368 'te': 'tel',
5369 'tg': 'tgk',
5370 'th': 'tha',
5371 'ti': 'tir',
5372 'tk': 'tuk',
5373 'tl': 'tgl',
5374 'tn': 'tsn',
5375 'to': 'ton',
5376 'tr': 'tur',
5377 'ts': 'tso',
5378 'tt': 'tat',
5379 'tw': 'twi',
5380 'ty': 'tah',
5381 'ug': 'uig',
5382 'uk': 'ukr',
5383 'ur': 'urd',
5384 'uz': 'uzb',
5385 've': 'ven',
5386 'vi': 'vie',
5387 'vo': 'vol',
5388 'wa': 'wln',
5389 'wo': 'wol',
5390 'xh': 'xho',
5391 'yi': 'yid',
e9a50fba 5392 'ji': 'yid', # Replaced by yi in 1989 revision
39672624
YCH
5393 'yo': 'yor',
5394 'za': 'zha',
5395 'zh': 'zho',
5396 'zu': 'zul',
5397 }
5398
5399 @classmethod
5400 def short2long(cls, code):
5401 """Convert language code from ISO 639-1 to ISO 639-2/T"""
5402 return cls._lang_map.get(code[:2])
5403
5404 @classmethod
5405 def long2short(cls, code):
5406 """Convert language code from ISO 639-2/T to ISO 639-1"""
5407 for short_name, long_name in cls._lang_map.items():
5408 if long_name == code:
5409 return short_name
5410
5411
4eb10f66
YCH
5412class ISO3166Utils(object):
5413 # From http://data.okfn.org/data/core/country-list
5414 _country_map = {
5415 'AF': 'Afghanistan',
5416 'AX': 'Åland Islands',
5417 'AL': 'Albania',
5418 'DZ': 'Algeria',
5419 'AS': 'American Samoa',
5420 'AD': 'Andorra',
5421 'AO': 'Angola',
5422 'AI': 'Anguilla',
5423 'AQ': 'Antarctica',
5424 'AG': 'Antigua and Barbuda',
5425 'AR': 'Argentina',
5426 'AM': 'Armenia',
5427 'AW': 'Aruba',
5428 'AU': 'Australia',
5429 'AT': 'Austria',
5430 'AZ': 'Azerbaijan',
5431 'BS': 'Bahamas',
5432 'BH': 'Bahrain',
5433 'BD': 'Bangladesh',
5434 'BB': 'Barbados',
5435 'BY': 'Belarus',
5436 'BE': 'Belgium',
5437 'BZ': 'Belize',
5438 'BJ': 'Benin',
5439 'BM': 'Bermuda',
5440 'BT': 'Bhutan',
5441 'BO': 'Bolivia, Plurinational State of',
5442 'BQ': 'Bonaire, Sint Eustatius and Saba',
5443 'BA': 'Bosnia and Herzegovina',
5444 'BW': 'Botswana',
5445 'BV': 'Bouvet Island',
5446 'BR': 'Brazil',
5447 'IO': 'British Indian Ocean Territory',
5448 'BN': 'Brunei Darussalam',
5449 'BG': 'Bulgaria',
5450 'BF': 'Burkina Faso',
5451 'BI': 'Burundi',
5452 'KH': 'Cambodia',
5453 'CM': 'Cameroon',
5454 'CA': 'Canada',
5455 'CV': 'Cape Verde',
5456 'KY': 'Cayman Islands',
5457 'CF': 'Central African Republic',
5458 'TD': 'Chad',
5459 'CL': 'Chile',
5460 'CN': 'China',
5461 'CX': 'Christmas Island',
5462 'CC': 'Cocos (Keeling) Islands',
5463 'CO': 'Colombia',
5464 'KM': 'Comoros',
5465 'CG': 'Congo',
5466 'CD': 'Congo, the Democratic Republic of the',
5467 'CK': 'Cook Islands',
5468 'CR': 'Costa Rica',
5469 'CI': 'Côte d\'Ivoire',
5470 'HR': 'Croatia',
5471 'CU': 'Cuba',
5472 'CW': 'Curaçao',
5473 'CY': 'Cyprus',
5474 'CZ': 'Czech Republic',
5475 'DK': 'Denmark',
5476 'DJ': 'Djibouti',
5477 'DM': 'Dominica',
5478 'DO': 'Dominican Republic',
5479 'EC': 'Ecuador',
5480 'EG': 'Egypt',
5481 'SV': 'El Salvador',
5482 'GQ': 'Equatorial Guinea',
5483 'ER': 'Eritrea',
5484 'EE': 'Estonia',
5485 'ET': 'Ethiopia',
5486 'FK': 'Falkland Islands (Malvinas)',
5487 'FO': 'Faroe Islands',
5488 'FJ': 'Fiji',
5489 'FI': 'Finland',
5490 'FR': 'France',
5491 'GF': 'French Guiana',
5492 'PF': 'French Polynesia',
5493 'TF': 'French Southern Territories',
5494 'GA': 'Gabon',
5495 'GM': 'Gambia',
5496 'GE': 'Georgia',
5497 'DE': 'Germany',
5498 'GH': 'Ghana',
5499 'GI': 'Gibraltar',
5500 'GR': 'Greece',
5501 'GL': 'Greenland',
5502 'GD': 'Grenada',
5503 'GP': 'Guadeloupe',
5504 'GU': 'Guam',
5505 'GT': 'Guatemala',
5506 'GG': 'Guernsey',
5507 'GN': 'Guinea',
5508 'GW': 'Guinea-Bissau',
5509 'GY': 'Guyana',
5510 'HT': 'Haiti',
5511 'HM': 'Heard Island and McDonald Islands',
5512 'VA': 'Holy See (Vatican City State)',
5513 'HN': 'Honduras',
5514 'HK': 'Hong Kong',
5515 'HU': 'Hungary',
5516 'IS': 'Iceland',
5517 'IN': 'India',
5518 'ID': 'Indonesia',
5519 'IR': 'Iran, Islamic Republic of',
5520 'IQ': 'Iraq',
5521 'IE': 'Ireland',
5522 'IM': 'Isle of Man',
5523 'IL': 'Israel',
5524 'IT': 'Italy',
5525 'JM': 'Jamaica',
5526 'JP': 'Japan',
5527 'JE': 'Jersey',
5528 'JO': 'Jordan',
5529 'KZ': 'Kazakhstan',
5530 'KE': 'Kenya',
5531 'KI': 'Kiribati',
5532 'KP': 'Korea, Democratic People\'s Republic of',
5533 'KR': 'Korea, Republic of',
5534 'KW': 'Kuwait',
5535 'KG': 'Kyrgyzstan',
5536 'LA': 'Lao People\'s Democratic Republic',
5537 'LV': 'Latvia',
5538 'LB': 'Lebanon',
5539 'LS': 'Lesotho',
5540 'LR': 'Liberia',
5541 'LY': 'Libya',
5542 'LI': 'Liechtenstein',
5543 'LT': 'Lithuania',
5544 'LU': 'Luxembourg',
5545 'MO': 'Macao',
5546 'MK': 'Macedonia, the Former Yugoslav Republic of',
5547 'MG': 'Madagascar',
5548 'MW': 'Malawi',
5549 'MY': 'Malaysia',
5550 'MV': 'Maldives',
5551 'ML': 'Mali',
5552 'MT': 'Malta',
5553 'MH': 'Marshall Islands',
5554 'MQ': 'Martinique',
5555 'MR': 'Mauritania',
5556 'MU': 'Mauritius',
5557 'YT': 'Mayotte',
5558 'MX': 'Mexico',
5559 'FM': 'Micronesia, Federated States of',
5560 'MD': 'Moldova, Republic of',
5561 'MC': 'Monaco',
5562 'MN': 'Mongolia',
5563 'ME': 'Montenegro',
5564 'MS': 'Montserrat',
5565 'MA': 'Morocco',
5566 'MZ': 'Mozambique',
5567 'MM': 'Myanmar',
5568 'NA': 'Namibia',
5569 'NR': 'Nauru',
5570 'NP': 'Nepal',
5571 'NL': 'Netherlands',
5572 'NC': 'New Caledonia',
5573 'NZ': 'New Zealand',
5574 'NI': 'Nicaragua',
5575 'NE': 'Niger',
5576 'NG': 'Nigeria',
5577 'NU': 'Niue',
5578 'NF': 'Norfolk Island',
5579 'MP': 'Northern Mariana Islands',
5580 'NO': 'Norway',
5581 'OM': 'Oman',
5582 'PK': 'Pakistan',
5583 'PW': 'Palau',
5584 'PS': 'Palestine, State of',
5585 'PA': 'Panama',
5586 'PG': 'Papua New Guinea',
5587 'PY': 'Paraguay',
5588 'PE': 'Peru',
5589 'PH': 'Philippines',
5590 'PN': 'Pitcairn',
5591 'PL': 'Poland',
5592 'PT': 'Portugal',
5593 'PR': 'Puerto Rico',
5594 'QA': 'Qatar',
5595 'RE': 'Réunion',
5596 'RO': 'Romania',
5597 'RU': 'Russian Federation',
5598 'RW': 'Rwanda',
5599 'BL': 'Saint Barthélemy',
5600 'SH': 'Saint Helena, Ascension and Tristan da Cunha',
5601 'KN': 'Saint Kitts and Nevis',
5602 'LC': 'Saint Lucia',
5603 'MF': 'Saint Martin (French part)',
5604 'PM': 'Saint Pierre and Miquelon',
5605 'VC': 'Saint Vincent and the Grenadines',
5606 'WS': 'Samoa',
5607 'SM': 'San Marino',
5608 'ST': 'Sao Tome and Principe',
5609 'SA': 'Saudi Arabia',
5610 'SN': 'Senegal',
5611 'RS': 'Serbia',
5612 'SC': 'Seychelles',
5613 'SL': 'Sierra Leone',
5614 'SG': 'Singapore',
5615 'SX': 'Sint Maarten (Dutch part)',
5616 'SK': 'Slovakia',
5617 'SI': 'Slovenia',
5618 'SB': 'Solomon Islands',
5619 'SO': 'Somalia',
5620 'ZA': 'South Africa',
5621 'GS': 'South Georgia and the South Sandwich Islands',
5622 'SS': 'South Sudan',
5623 'ES': 'Spain',
5624 'LK': 'Sri Lanka',
5625 'SD': 'Sudan',
5626 'SR': 'Suriname',
5627 'SJ': 'Svalbard and Jan Mayen',
5628 'SZ': 'Swaziland',
5629 'SE': 'Sweden',
5630 'CH': 'Switzerland',
5631 'SY': 'Syrian Arab Republic',
5632 'TW': 'Taiwan, Province of China',
5633 'TJ': 'Tajikistan',
5634 'TZ': 'Tanzania, United Republic of',
5635 'TH': 'Thailand',
5636 'TL': 'Timor-Leste',
5637 'TG': 'Togo',
5638 'TK': 'Tokelau',
5639 'TO': 'Tonga',
5640 'TT': 'Trinidad and Tobago',
5641 'TN': 'Tunisia',
5642 'TR': 'Turkey',
5643 'TM': 'Turkmenistan',
5644 'TC': 'Turks and Caicos Islands',
5645 'TV': 'Tuvalu',
5646 'UG': 'Uganda',
5647 'UA': 'Ukraine',
5648 'AE': 'United Arab Emirates',
5649 'GB': 'United Kingdom',
5650 'US': 'United States',
5651 'UM': 'United States Minor Outlying Islands',
5652 'UY': 'Uruguay',
5653 'UZ': 'Uzbekistan',
5654 'VU': 'Vanuatu',
5655 'VE': 'Venezuela, Bolivarian Republic of',
5656 'VN': 'Viet Nam',
5657 'VG': 'Virgin Islands, British',
5658 'VI': 'Virgin Islands, U.S.',
5659 'WF': 'Wallis and Futuna',
5660 'EH': 'Western Sahara',
5661 'YE': 'Yemen',
5662 'ZM': 'Zambia',
5663 'ZW': 'Zimbabwe',
5664 }
5665
5666 @classmethod
5667 def short2full(cls, code):
5668 """Convert an ISO 3166-2 country code to the corresponding full name"""
5669 return cls._country_map.get(code.upper())
5670
5671
773f291d
S
5672class GeoUtils(object):
5673 # Major IPv4 address blocks per country
5674 _country_ip_map = {
53896ca5 5675 'AD': '46.172.224.0/19',
773f291d
S
5676 'AE': '94.200.0.0/13',
5677 'AF': '149.54.0.0/17',
5678 'AG': '209.59.64.0/18',
5679 'AI': '204.14.248.0/21',
5680 'AL': '46.99.0.0/16',
5681 'AM': '46.70.0.0/15',
5682 'AO': '105.168.0.0/13',
53896ca5
S
5683 'AP': '182.50.184.0/21',
5684 'AQ': '23.154.160.0/24',
773f291d
S
5685 'AR': '181.0.0.0/12',
5686 'AS': '202.70.112.0/20',
53896ca5 5687 'AT': '77.116.0.0/14',
773f291d
S
5688 'AU': '1.128.0.0/11',
5689 'AW': '181.41.0.0/18',
53896ca5
S
5690 'AX': '185.217.4.0/22',
5691 'AZ': '5.197.0.0/16',
773f291d
S
5692 'BA': '31.176.128.0/17',
5693 'BB': '65.48.128.0/17',
5694 'BD': '114.130.0.0/16',
5695 'BE': '57.0.0.0/8',
53896ca5 5696 'BF': '102.178.0.0/15',
773f291d
S
5697 'BG': '95.42.0.0/15',
5698 'BH': '37.131.0.0/17',
5699 'BI': '154.117.192.0/18',
5700 'BJ': '137.255.0.0/16',
53896ca5 5701 'BL': '185.212.72.0/23',
773f291d
S
5702 'BM': '196.12.64.0/18',
5703 'BN': '156.31.0.0/16',
5704 'BO': '161.56.0.0/16',
5705 'BQ': '161.0.80.0/20',
53896ca5 5706 'BR': '191.128.0.0/12',
773f291d
S
5707 'BS': '24.51.64.0/18',
5708 'BT': '119.2.96.0/19',
5709 'BW': '168.167.0.0/16',
5710 'BY': '178.120.0.0/13',
5711 'BZ': '179.42.192.0/18',
5712 'CA': '99.224.0.0/11',
5713 'CD': '41.243.0.0/16',
53896ca5
S
5714 'CF': '197.242.176.0/21',
5715 'CG': '160.113.0.0/16',
773f291d 5716 'CH': '85.0.0.0/13',
53896ca5 5717 'CI': '102.136.0.0/14',
773f291d
S
5718 'CK': '202.65.32.0/19',
5719 'CL': '152.172.0.0/14',
53896ca5 5720 'CM': '102.244.0.0/14',
773f291d
S
5721 'CN': '36.128.0.0/10',
5722 'CO': '181.240.0.0/12',
5723 'CR': '201.192.0.0/12',
5724 'CU': '152.206.0.0/15',
5725 'CV': '165.90.96.0/19',
5726 'CW': '190.88.128.0/17',
53896ca5 5727 'CY': '31.153.0.0/16',
773f291d
S
5728 'CZ': '88.100.0.0/14',
5729 'DE': '53.0.0.0/8',
5730 'DJ': '197.241.0.0/17',
5731 'DK': '87.48.0.0/12',
5732 'DM': '192.243.48.0/20',
5733 'DO': '152.166.0.0/15',
5734 'DZ': '41.96.0.0/12',
5735 'EC': '186.68.0.0/15',
5736 'EE': '90.190.0.0/15',
5737 'EG': '156.160.0.0/11',
5738 'ER': '196.200.96.0/20',
5739 'ES': '88.0.0.0/11',
5740 'ET': '196.188.0.0/14',
5741 'EU': '2.16.0.0/13',
5742 'FI': '91.152.0.0/13',
5743 'FJ': '144.120.0.0/16',
53896ca5 5744 'FK': '80.73.208.0/21',
773f291d
S
5745 'FM': '119.252.112.0/20',
5746 'FO': '88.85.32.0/19',
5747 'FR': '90.0.0.0/9',
5748 'GA': '41.158.0.0/15',
5749 'GB': '25.0.0.0/8',
5750 'GD': '74.122.88.0/21',
5751 'GE': '31.146.0.0/16',
5752 'GF': '161.22.64.0/18',
5753 'GG': '62.68.160.0/19',
53896ca5
S
5754 'GH': '154.160.0.0/12',
5755 'GI': '95.164.0.0/16',
773f291d
S
5756 'GL': '88.83.0.0/19',
5757 'GM': '160.182.0.0/15',
5758 'GN': '197.149.192.0/18',
5759 'GP': '104.250.0.0/19',
5760 'GQ': '105.235.224.0/20',
5761 'GR': '94.64.0.0/13',
5762 'GT': '168.234.0.0/16',
5763 'GU': '168.123.0.0/16',
5764 'GW': '197.214.80.0/20',
5765 'GY': '181.41.64.0/18',
5766 'HK': '113.252.0.0/14',
5767 'HN': '181.210.0.0/16',
5768 'HR': '93.136.0.0/13',
5769 'HT': '148.102.128.0/17',
5770 'HU': '84.0.0.0/14',
5771 'ID': '39.192.0.0/10',
5772 'IE': '87.32.0.0/12',
5773 'IL': '79.176.0.0/13',
5774 'IM': '5.62.80.0/20',
5775 'IN': '117.192.0.0/10',
5776 'IO': '203.83.48.0/21',
5777 'IQ': '37.236.0.0/14',
5778 'IR': '2.176.0.0/12',
5779 'IS': '82.221.0.0/16',
5780 'IT': '79.0.0.0/10',
5781 'JE': '87.244.64.0/18',
5782 'JM': '72.27.0.0/17',
5783 'JO': '176.29.0.0/16',
53896ca5 5784 'JP': '133.0.0.0/8',
773f291d
S
5785 'KE': '105.48.0.0/12',
5786 'KG': '158.181.128.0/17',
5787 'KH': '36.37.128.0/17',
5788 'KI': '103.25.140.0/22',
5789 'KM': '197.255.224.0/20',
53896ca5 5790 'KN': '198.167.192.0/19',
773f291d
S
5791 'KP': '175.45.176.0/22',
5792 'KR': '175.192.0.0/10',
5793 'KW': '37.36.0.0/14',
5794 'KY': '64.96.0.0/15',
5795 'KZ': '2.72.0.0/13',
5796 'LA': '115.84.64.0/18',
5797 'LB': '178.135.0.0/16',
53896ca5 5798 'LC': '24.92.144.0/20',
773f291d
S
5799 'LI': '82.117.0.0/19',
5800 'LK': '112.134.0.0/15',
53896ca5 5801 'LR': '102.183.0.0/16',
773f291d
S
5802 'LS': '129.232.0.0/17',
5803 'LT': '78.56.0.0/13',
5804 'LU': '188.42.0.0/16',
5805 'LV': '46.109.0.0/16',
5806 'LY': '41.252.0.0/14',
5807 'MA': '105.128.0.0/11',
5808 'MC': '88.209.64.0/18',
5809 'MD': '37.246.0.0/16',
5810 'ME': '178.175.0.0/17',
5811 'MF': '74.112.232.0/21',
5812 'MG': '154.126.0.0/17',
5813 'MH': '117.103.88.0/21',
5814 'MK': '77.28.0.0/15',
5815 'ML': '154.118.128.0/18',
5816 'MM': '37.111.0.0/17',
5817 'MN': '49.0.128.0/17',
5818 'MO': '60.246.0.0/16',
5819 'MP': '202.88.64.0/20',
5820 'MQ': '109.203.224.0/19',
5821 'MR': '41.188.64.0/18',
5822 'MS': '208.90.112.0/22',
5823 'MT': '46.11.0.0/16',
5824 'MU': '105.16.0.0/12',
5825 'MV': '27.114.128.0/18',
53896ca5 5826 'MW': '102.70.0.0/15',
773f291d
S
5827 'MX': '187.192.0.0/11',
5828 'MY': '175.136.0.0/13',
5829 'MZ': '197.218.0.0/15',
5830 'NA': '41.182.0.0/16',
5831 'NC': '101.101.0.0/18',
5832 'NE': '197.214.0.0/18',
5833 'NF': '203.17.240.0/22',
5834 'NG': '105.112.0.0/12',
5835 'NI': '186.76.0.0/15',
5836 'NL': '145.96.0.0/11',
5837 'NO': '84.208.0.0/13',
5838 'NP': '36.252.0.0/15',
5839 'NR': '203.98.224.0/19',
5840 'NU': '49.156.48.0/22',
5841 'NZ': '49.224.0.0/14',
5842 'OM': '5.36.0.0/15',
5843 'PA': '186.72.0.0/15',
5844 'PE': '186.160.0.0/14',
5845 'PF': '123.50.64.0/18',
5846 'PG': '124.240.192.0/19',
5847 'PH': '49.144.0.0/13',
5848 'PK': '39.32.0.0/11',
5849 'PL': '83.0.0.0/11',
5850 'PM': '70.36.0.0/20',
5851 'PR': '66.50.0.0/16',
5852 'PS': '188.161.0.0/16',
5853 'PT': '85.240.0.0/13',
5854 'PW': '202.124.224.0/20',
5855 'PY': '181.120.0.0/14',
5856 'QA': '37.210.0.0/15',
53896ca5 5857 'RE': '102.35.0.0/16',
773f291d 5858 'RO': '79.112.0.0/13',
53896ca5 5859 'RS': '93.86.0.0/15',
773f291d 5860 'RU': '5.136.0.0/13',
53896ca5 5861 'RW': '41.186.0.0/16',
773f291d
S
5862 'SA': '188.48.0.0/13',
5863 'SB': '202.1.160.0/19',
5864 'SC': '154.192.0.0/11',
53896ca5 5865 'SD': '102.120.0.0/13',
773f291d 5866 'SE': '78.64.0.0/12',
53896ca5 5867 'SG': '8.128.0.0/10',
773f291d
S
5868 'SI': '188.196.0.0/14',
5869 'SK': '78.98.0.0/15',
53896ca5 5870 'SL': '102.143.0.0/17',
773f291d
S
5871 'SM': '89.186.32.0/19',
5872 'SN': '41.82.0.0/15',
53896ca5 5873 'SO': '154.115.192.0/18',
773f291d
S
5874 'SR': '186.179.128.0/17',
5875 'SS': '105.235.208.0/21',
5876 'ST': '197.159.160.0/19',
5877 'SV': '168.243.0.0/16',
5878 'SX': '190.102.0.0/20',
5879 'SY': '5.0.0.0/16',
5880 'SZ': '41.84.224.0/19',
5881 'TC': '65.255.48.0/20',
5882 'TD': '154.68.128.0/19',
5883 'TG': '196.168.0.0/14',
5884 'TH': '171.96.0.0/13',
5885 'TJ': '85.9.128.0/18',
5886 'TK': '27.96.24.0/21',
5887 'TL': '180.189.160.0/20',
5888 'TM': '95.85.96.0/19',
5889 'TN': '197.0.0.0/11',
5890 'TO': '175.176.144.0/21',
5891 'TR': '78.160.0.0/11',
5892 'TT': '186.44.0.0/15',
5893 'TV': '202.2.96.0/19',
5894 'TW': '120.96.0.0/11',
5895 'TZ': '156.156.0.0/14',
53896ca5
S
5896 'UA': '37.52.0.0/14',
5897 'UG': '102.80.0.0/13',
5898 'US': '6.0.0.0/8',
773f291d 5899 'UY': '167.56.0.0/13',
53896ca5 5900 'UZ': '84.54.64.0/18',
773f291d 5901 'VA': '212.77.0.0/19',
53896ca5 5902 'VC': '207.191.240.0/21',
773f291d 5903 'VE': '186.88.0.0/13',
53896ca5 5904 'VG': '66.81.192.0/20',
773f291d
S
5905 'VI': '146.226.0.0/16',
5906 'VN': '14.160.0.0/11',
5907 'VU': '202.80.32.0/20',
5908 'WF': '117.20.32.0/21',
5909 'WS': '202.4.32.0/19',
5910 'YE': '134.35.0.0/16',
5911 'YT': '41.242.116.0/22',
5912 'ZA': '41.0.0.0/11',
53896ca5
S
5913 'ZM': '102.144.0.0/13',
5914 'ZW': '102.177.192.0/18',
773f291d
S
5915 }
5916
5917 @classmethod
5f95927a
S
5918 def random_ipv4(cls, code_or_block):
5919 if len(code_or_block) == 2:
5920 block = cls._country_ip_map.get(code_or_block.upper())
5921 if not block:
5922 return None
5923 else:
5924 block = code_or_block
773f291d
S
5925 addr, preflen = block.split('/')
5926 addr_min = compat_struct_unpack('!L', socket.inet_aton(addr))[0]
5927 addr_max = addr_min | (0xffffffff >> int(preflen))
18a0defa 5928 return compat_str(socket.inet_ntoa(
4248dad9 5929 compat_struct_pack('!L', random.randint(addr_min, addr_max))))
773f291d
S
5930
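# Usage sketch (illustrative; the address is random within the country's block,
# so actual output varies between calls):
#   >>> GeoUtils.random_ipv4('DE')            # drawn from 53.0.0.0/8 above
#   '53.17.122.9'
#   >>> GeoUtils.random_ipv4('5.136.0.0/13')  # an explicit CIDR block also works
#   '5.140.2.77'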
5931
91410c9b 5932class PerRequestProxyHandler(compat_urllib_request.ProxyHandler):
2461f79d
PH
5933 def __init__(self, proxies=None):
5934 # Set default handlers
5935 for type in ('http', 'https'):
5936 setattr(self, '%s_open' % type,
5937 lambda r, proxy='__noproxy__', type=type, meth=self.proxy_open:
5938 meth(r, proxy, type))
38e87f6c 5939 compat_urllib_request.ProxyHandler.__init__(self, proxies)
2461f79d 5940
91410c9b 5941 def proxy_open(self, req, proxy, type):
2461f79d 5942 req_proxy = req.headers.get('Ytdl-request-proxy')
91410c9b
PH
5943 if req_proxy is not None:
5944 proxy = req_proxy
2461f79d
PH
5945 del req.headers['Ytdl-request-proxy']
5946
5947 if proxy == '__noproxy__':
5948 return None # No Proxy
51fb4995 5949 if compat_urlparse.urlparse(proxy).scheme.lower() in ('socks', 'socks4', 'socks4a', 'socks5'):
71aff188 5950 req.add_header('Ytdl-socks-proxy', proxy)
7a5c1cfe 5951 # yt-dlp's http/https handlers do the actual wrapping of the socket with SOCKS
71aff188 5952 return None
91410c9b
PH
5953 return compat_urllib_request.ProxyHandler.proxy_open(
5954 self, req, proxy, type)
5bc880b9
YCH
5955
5956
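# Usage sketch (the proxy URL below is a made-up example):
#   >>> opener = compat_urllib_request.build_opener(PerRequestProxyHandler())
#   >>> req = compat_urllib_request.Request('http://example.com')
#   >>> req.add_header('Ytdl-request-proxy', 'http://127.0.0.1:8080')
#   >>> # opener.open(req) would route only this request through the given proxy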
0a5445dd
YCH
5957# Both long_to_bytes and bytes_to_long are adapted from PyCrypto, which is
5958# released into Public Domain
5959# https://github.com/dlitz/pycrypto/blob/master/lib/Crypto/Util/number.py#L387
5960
5961def long_to_bytes(n, blocksize=0):
5962 """long_to_bytes(n:long, blocksize:int) : string
5963 Convert a long integer to a byte string.
5964
5965 If optional blocksize is given and greater than zero, pad the front of the
5966 byte string with binary zeros so that the length is a multiple of
5967 blocksize.
5968 """
5969 # after much testing, this algorithm was deemed to be the fastest
5970 s = b''
5971 n = int(n)
5972 while n > 0:
5973 s = compat_struct_pack('>I', n & 0xffffffff) + s
5974 n = n >> 32
5975 # strip off leading zeros
5976 for i in range(len(s)):
5977 if s[i] != b'\000'[0]:
5978 break
5979 else:
5980 # only happens when n == 0
5981 s = b'\000'
5982 i = 0
5983 s = s[i:]
5984 # add back some pad bytes. this could be done more efficiently w.r.t. the
5985 # de-padding being done above, but sigh...
5986 if blocksize > 0 and len(s) % blocksize:
5987 s = (blocksize - len(s) % blocksize) * b'\000' + s
5988 return s
5989
5990
5991def bytes_to_long(s):
5992 """bytes_to_long(string) : long
5993 Convert a byte string to a long integer.
5994
5995 This is (essentially) the inverse of long_to_bytes().
5996 """
5997 acc = 0
5998 length = len(s)
5999 if length % 4:
6000 extra = (4 - length % 4)
6001 s = b'\000' * extra + s
6002 length = length + extra
6003 for i in range(0, length, 4):
6004 acc = (acc << 32) + compat_struct_unpack('>I', s[i:i + 4])[0]
6005 return acc
6006
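# Round-trip sketch (doctest-style, illustrative):
#   >>> long_to_bytes(65537, blocksize=4)
#   b'\x00\x01\x00\x01'
#   >>> bytes_to_long(b'\x00\x01\x00\x01')
#   65537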
6007
5bc880b9
YCH
6008def ohdave_rsa_encrypt(data, exponent, modulus):
6009 '''
6010 Implement OHDave's RSA algorithm. See http://www.ohdave.com/rsa/
6011
6012 Input:
6013 data: data to encrypt, bytes-like object
6014 exponent, modulus: parameter e and N of RSA algorithm, both integer
6015 Output: hex string of encrypted data
6016
6017 Limitation: supports one block encryption only
6018 '''
6019
6020 payload = int(binascii.hexlify(data[::-1]), 16)
6021 encrypted = pow(payload, exponent, modulus)
6022 return '%x' % encrypted
81bdc8fd
YCH
6023
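# Toy illustration (the exponent/modulus here are NOT real RSA parameters):
#   >>> ohdave_rsa_encrypt(b'\x02', 3, 33)   # pow(2, 3, 33) == 8
#   '8'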
6024
f48409c7
YCH
6025def pkcs1pad(data, length):
6026 """
6027 Padding input data with PKCS#1 scheme
6028
6029 @param {int[]} data input data
6030 @param {int} length target length
6031 @returns {int[]} padded data
6032 """
6033 if len(data) > length - 11:
6034 raise ValueError('Input data too long for PKCS#1 padding')
6035
6036 pseudo_random = [random.randint(0, 254) for _ in range(length - len(data) - 3)]
6037 return [0, 2] + pseudo_random + [0] + data
6038
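# Shape sketch: padding 3 data bytes to length 16 yields
# [0, 2] + 10 pseudo-random ints + [0] + data, i.e. 16 ints in total:
#   >>> len(pkcs1pad([1, 2, 3], 16))
#   16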
6039
5eb6bdce 6040def encode_base_n(num, n, table=None):
59f898b7 6041 FULL_TABLE = '0123456789abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ'
59f898b7
YCH
6042 if not table:
6043 table = FULL_TABLE[:n]
6044
5eb6bdce
YCH
6045 if n > len(table):
6046 raise ValueError('base %d exceeds table length %d' % (n, len(table)))
6047
6048 if num == 0:
6049 return table[0]
6050
81bdc8fd
YCH
6051 ret = ''
6052 while num:
6053 ret = table[num % n] + ret
6054 num = num // n
6055 return ret
f52354a8
YCH
6056
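# Illustrative calls using the default table:
#   >>> encode_base_n(255, 16)
#   'ff'
#   >>> encode_base_n(255, 2)
#   '11111111'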
6057
6058def decode_packed_codes(code):
06b3fe29 6059 mobj = re.search(PACKED_CODES_RE, code)
a0566bbf 6060 obfuscated_code, base, count, symbols = mobj.groups()
f52354a8
YCH
6061 base = int(base)
6062 count = int(count)
6063 symbols = symbols.split('|')
6064 symbol_table = {}
6065
6066 while count:
6067 count -= 1
5eb6bdce 6068 base_n_count = encode_base_n(count, base)
f52354a8
YCH
6069 symbol_table[base_n_count] = symbols[count] or base_n_count
6070
6071 return re.sub(
6072 r'\b(\w+)\b', lambda mobj: symbol_table[mobj.group(0)],
a0566bbf 6073 obfuscated_code)
e154c651 6074
6075
1ced2221
S
6076def caesar(s, alphabet, shift):
6077 if shift == 0:
6078 return s
6079 l = len(alphabet)
6080 return ''.join(
6081 alphabet[(alphabet.index(c) + shift) % l] if c in alphabet else c
6082 for c in s)
6083
6084
6085def rot47(s):
6086 return caesar(s, r'''!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~''', 47)
6087
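# rot47 is its own inverse over the printable ASCII range:
#   >>> rot47('Hello')
#   'w6==@'
#   >>> rot47('w6==@')
#   'Hello'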
6088
e154c651 6089def parse_m3u8_attributes(attrib):
6090 info = {}
6091 for (key, val) in re.findall(r'(?P<key>[A-Z0-9-]+)=(?P<val>"[^"]+"|[^",]+)(?:,|$)', attrib):
6092 if val.startswith('"'):
6093 val = val[1:-1]
6094 info[key] = val
6095 return info
1143535d
YCH
6096
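# Illustrative attribute line (commas inside quoted values are preserved):
#   >>> parse_m3u8_attributes('BANDWIDTH=1280000,CODECS="avc1.4d401f,mp4a.40.2"')
#   {'BANDWIDTH': '1280000', 'CODECS': 'avc1.4d401f,mp4a.40.2'}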
6097
6098def urshift(val, n):
6099 return val >> n if val >= 0 else (val + 0x100000000) >> n
d3f8e038
YCH
6100
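# Unsigned 32-bit right shift, like JavaScript's '>>>' operator:
#   >>> urshift(16, 2)
#   4
#   >>> urshift(-1, 28)   # -1 is treated as 0xFFFFFFFF
#   15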
6101
6102# Based on png2str() written by @gdkchan and improved by @yokrysty
067aa17e 6103# Originally posted at https://github.com/ytdl-org/youtube-dl/issues/9706
d3f8e038
YCH
6104def decode_png(png_data):
6105 # Reference: https://www.w3.org/TR/PNG/
6106 header = png_data[8:]
6107
6108 if png_data[:8] != b'\x89PNG\x0d\x0a\x1a\x0a' or header[4:8] != b'IHDR':
6109 raise IOError('Not a valid PNG file.')
6110
6111 int_map = {1: '>B', 2: '>H', 4: '>I'}
6112 unpack_integer = lambda x: compat_struct_unpack(int_map[len(x)], x)[0]
6113
6114 chunks = []
6115
6116 while header:
6117 length = unpack_integer(header[:4])
6118 header = header[4:]
6119
6120 chunk_type = header[:4]
6121 header = header[4:]
6122
6123 chunk_data = header[:length]
6124 header = header[length:]
6125
6126 header = header[4:] # Skip CRC
6127
6128 chunks.append({
6129 'type': chunk_type,
6130 'length': length,
6131 'data': chunk_data
6132 })
6133
6134 ihdr = chunks[0]['data']
6135
6136 width = unpack_integer(ihdr[:4])
6137 height = unpack_integer(ihdr[4:8])
6138
6139 idat = b''
6140
6141 for chunk in chunks:
6142 if chunk['type'] == b'IDAT':
6143 idat += chunk['data']
6144
6145 if not idat:
6146 raise IOError('Unable to read PNG data.')
6147
6148 decompressed_data = bytearray(zlib.decompress(idat))
6149
6150 stride = width * 3
6151 pixels = []
6152
6153 def _get_pixel(idx):
6154 x = idx % stride
6155 y = idx // stride
6156 return pixels[y][x]
6157
6158 for y in range(height):
6159 basePos = y * (1 + stride)
6160 filter_type = decompressed_data[basePos]
6161
6162 current_row = []
6163
6164 pixels.append(current_row)
6165
6166 for x in range(stride):
6167 color = decompressed_data[1 + basePos + x]
6168 basex = y * stride + x
6169 left = 0
6170 up = 0
6171
6172 if x > 2:
6173 left = _get_pixel(basex - 3)
6174 if y > 0:
6175 up = _get_pixel(basex - stride)
6176
6177 if filter_type == 1: # Sub
6178 color = (color + left) & 0xff
6179 elif filter_type == 2: # Up
6180 color = (color + up) & 0xff
6181 elif filter_type == 3: # Average
6182 color = (color + ((left + up) >> 1)) & 0xff
6183 elif filter_type == 4: # Paeth
6184 a = left
6185 b = up
6186 c = 0
6187
6188 if x > 2 and y > 0:
6189 c = _get_pixel(basex - stride - 3)
6190
6191 p = a + b - c
6192
6193 pa = abs(p - a)
6194 pb = abs(p - b)
6195 pc = abs(p - c)
6196
6197 if pa <= pb and pa <= pc:
6198 color = (color + a) & 0xff
6199 elif pb <= pc:
6200 color = (color + b) & 0xff
6201 else:
6202 color = (color + c) & 0xff
6203
6204 current_row.append(color)
6205
6206 return width, height, pixels
efa97bdc
YCH
6207
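# Usage sketch (assumes 'frame.png' is an 8-bit RGB PNG; the name is hypothetical):
#   >>> width, height, pixels = decode_png(open('frame.png', 'rb').read())
#   >>> r, g, b = pixels[0][0:3]   # each row interleaves R, G, B byte values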
6208
6209def write_xattr(path, key, value):
6210 # This mess below finds the best xattr tool for the job
6211 try:
6212 # try the pyxattr module...
6213 import xattr
6214
53a7e3d2
YCH
6215 if hasattr(xattr, 'set'): # pyxattr
6216 # Unicode arguments are not supported in python-pyxattr until
6217 # version 0.5.0
067aa17e 6218 # See https://github.com/ytdl-org/youtube-dl/issues/5498
53a7e3d2
YCH
6219 pyxattr_required_version = '0.5.0'
6220 if version_tuple(xattr.__version__) < version_tuple(pyxattr_required_version):
6221 # TODO: fallback to CLI tools
6222 raise XAttrUnavailableError(
6223 'python-pyxattr is detected but is too old. '
7a5c1cfe 6224 'yt-dlp requires %s or above while your version is %s. '
53a7e3d2
YCH
6225 'Falling back to other xattr implementations' % (
6226 pyxattr_required_version, xattr.__version__))
6227
6228 setxattr = xattr.set
6229 else: # xattr
6230 setxattr = xattr.setxattr
efa97bdc
YCH
6231
6232 try:
53a7e3d2 6233 setxattr(path, key, value)
efa97bdc
YCH
6234 except EnvironmentError as e:
6235 raise XAttrMetadataError(e.errno, e.strerror)
6236
6237 except ImportError:
6238 if compat_os_name == 'nt':
6239 # Write xattrs to NTFS Alternate Data Streams:
6240 # http://en.wikipedia.org/wiki/NTFS#Alternate_data_streams_.28ADS.29
6241 assert ':' not in key
6242 assert os.path.exists(path)
6243
6244 ads_fn = path + ':' + key
6245 try:
6246 with open(ads_fn, 'wb') as f:
6247 f.write(value)
6248 except EnvironmentError as e:
6249 raise XAttrMetadataError(e.errno, e.strerror)
6250 else:
6251 user_has_setfattr = check_executable('setfattr', ['--version'])
6252 user_has_xattr = check_executable('xattr', ['-h'])
6253
6254 if user_has_setfattr or user_has_xattr:
6255
6256 value = value.decode('utf-8')
6257 if user_has_setfattr:
6258 executable = 'setfattr'
6259 opts = ['-n', key, '-v', value]
6260 elif user_has_xattr:
6261 executable = 'xattr'
6262 opts = ['-w', key, value]
6263
3089bc74
S
6264 cmd = ([encodeFilename(executable, True)]
6265 + [encodeArgument(o) for o in opts]
6266 + [encodeFilename(path, True)])
efa97bdc
YCH
6267
6268 try:
d3c93ec2 6269 p = Popen(
efa97bdc
YCH
6270 cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE, stdin=subprocess.PIPE)
6271 except EnvironmentError as e:
6272 raise XAttrMetadataError(e.errno, e.strerror)
d3c93ec2 6273 stdout, stderr = p.communicate_or_kill()
efa97bdc
YCH
6274 stderr = stderr.decode('utf-8', 'replace')
6275 if p.returncode != 0:
6276 raise XAttrMetadataError(p.returncode, stderr)
6277
6278 else:
6279 # On Unix, but couldn't find pyxattr, setfattr, or xattr.
6280 if sys.platform.startswith('linux'):
6281 raise XAttrUnavailableError(
6282 "Couldn't find a tool to set the xattrs. "
6283 "Install either the python 'pyxattr' or 'xattr' "
6284 "modules, or the GNU 'attr' package "
6285 "(which contains the 'setfattr' tool).")
6286 else:
6287 raise XAttrUnavailableError(
6288 "Couldn't find a tool to set the xattrs. "
6289 "Install either the python 'xattr' module, "
6290 "or the 'xattr' binary.")
0c265486
YCH
6291
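# Usage sketch (hypothetical file name; the value must be a bytes object):
#   >>> write_xattr('video.mp4', 'user.xdg.referrer.url', b'https://example.com/watch')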
6292
6293def random_birthday(year_field, month_field, day_field):
aa374bc7
AS
6294 start_date = datetime.date(1950, 1, 1)
6295 end_date = datetime.date(1995, 12, 31)
6296 offset = random.randint(0, (end_date - start_date).days)
6297 random_date = start_date + datetime.timedelta(offset)
0c265486 6298 return {
aa374bc7
AS
6299 year_field: str(random_date.year),
6300 month_field: str(random_date.month),
6301 day_field: str(random_date.day),
0c265486 6302 }
732044af 6303
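# Illustrative call (the date is random, so output varies):
#   >>> random_birthday('year', 'month', 'day')
#   {'year': '1987', 'month': '6', 'day': '23'}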
c76eb41b 6304
732044af 6305# Templates for internet shortcut files, which are plain text files.
6306DOT_URL_LINK_TEMPLATE = '''
6307[InternetShortcut]
6308URL=%(url)s
6309'''.lstrip()
6310
6311DOT_WEBLOC_LINK_TEMPLATE = '''
6312<?xml version="1.0" encoding="UTF-8"?>
6313<!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
6314<plist version="1.0">
6315<dict>
6316\t<key>URL</key>
6317\t<string>%(url)s</string>
6318</dict>
6319</plist>
6320'''.lstrip()
6321
6322DOT_DESKTOP_LINK_TEMPLATE = '''
6323[Desktop Entry]
6324Encoding=UTF-8
6325Name=%(filename)s
6326Type=Link
6327URL=%(url)s
6328Icon=text-html
6329'''.lstrip()
6330
08438d2c 6331LINK_TEMPLATES = {
6332 'url': DOT_URL_LINK_TEMPLATE,
6333 'desktop': DOT_DESKTOP_LINK_TEMPLATE,
6334 'webloc': DOT_WEBLOC_LINK_TEMPLATE,
6335}
6336
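# Rendering sketch (illustrative URL):
#   >>> LINK_TEMPLATES['url'] % {'url': 'https://example.com/video'}
#   '[InternetShortcut]\nURL=https://example.com/video\n'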
732044af 6337
6338def iri_to_uri(iri):
6339 """
6340 Converts an IRI (Internationalized Resource Identifier, allowing Unicode characters) to a URI (Uniform Resource Identifier, ASCII-only).
6341
6342 The function doesn't add an additional layer of escaping; e.g., it doesn't escape `%3C` as `%253C`. Instead, it percent-escapes characters with an underlying UTF-8 encoding *besides* those already escaped, leaving the URI intact.
6343 """
6344
6345 iri_parts = compat_urllib_parse_urlparse(iri)
6346
6347 if '[' in iri_parts.netloc:
6348 raise ValueError('IPv6 URIs are not, yet, supported.')
6349 # Querying `.netloc`, when there's only one bracket, also raises a ValueError.
6350
6351 # The `safe` argument values, that the following code uses, contain the characters that should not be percent-encoded. Everything else but letters, digits and '_.-' will be percent-encoded with an underlying UTF-8 encoding. Everything already percent-encoded will be left as is.
6352
6353 net_location = ''
6354 if iri_parts.username:
6355 net_location += compat_urllib_parse_quote(iri_parts.username, safe=r"!$%&'()*+,~")
6356 if iri_parts.password is not None:
6357 net_location += ':' + compat_urllib_parse_quote(iri_parts.password, safe=r"!$%&'()*+,~")
6358 net_location += '@'
6359
6360 net_location += iri_parts.hostname.encode('idna').decode('utf-8') # Punycode for Unicode hostnames.
6361 # The 'idna' encoding produces ASCII text.
6362 if iri_parts.port is not None and iri_parts.port != 80:
6363 net_location += ':' + str(iri_parts.port)
6364
6365 return compat_urllib_parse_urlunparse(
6366 (iri_parts.scheme,
6367 net_location,
6368
6369 compat_urllib_parse_quote_plus(iri_parts.path, safe=r"!$%&'()*+,/:;=@|~"),
6370
6371 # Unsure about the `safe` argument, since this is a legacy way of handling parameters.
6372 compat_urllib_parse_quote_plus(iri_parts.params, safe=r"!$%&'()*+,/:;=@|~"),
6373
6374 # Not totally sure about the `safe` argument, since the source does not explicitly mention the query URI component.
6375 compat_urllib_parse_quote_plus(iri_parts.query, safe=r"!$%&'()*+,/:;=?@{|}~"),
6376
6377 compat_urllib_parse_quote_plus(iri_parts.fragment, safe=r"!#$%&'()*+,/:;=?@{|}~")))
6378
6379 # Source for `safe` arguments: https://url.spec.whatwg.org/#percent-encoded-bytes.
6380
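# Illustrative conversion (non-ASCII characters are percent-encoded as UTF-8):
#   >>> iri_to_uri('http://example.com/föö?q=bär')
#   'http://example.com/f%C3%B6%C3%B6?q=b%C3%A4r'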
6381
6382def to_high_limit_path(path):
6383 if sys.platform in ['win32', 'cygwin']:
6384 # Work around MAX_PATH limitation on Windows. The maximum allowed length for the individual path segments may still be quite limited.
6385 return r'\\?\ '.rstrip() + os.path.abspath(path)
6386
6387 return path
76d321f6 6388
c76eb41b 6389
b868936c 6390def format_field(obj, field=None, template='%s', ignore=(None, ''), default='', func=None):
6391 if field is None:
6392 val = obj if obj is not None else default
6393 else:
6394 val = obj.get(field, default)
76d321f6 6395 if func and val not in ignore:
6396 val = func(val)
6397 return template % val if val not in ignore else default
00dd0cd5 6398
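# Illustrative calls:
#   >>> format_field({'height': 1080}, 'height', '%sp')
#   '1080p'
#   >>> format_field({'height': None}, 'height', '%sp', default='unknown')
#   'unknown'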
6399
6400def clean_podcast_url(url):
6401 return re.sub(r'''(?x)
6402 (?:
6403 (?:
6404 chtbl\.com/track|
6405 media\.blubrry\.com| # https://create.blubrry.com/resources/podcast-media-download-statistics/getting-started/
6406 play\.podtrac\.com
6407 )/[^/]+|
6408 (?:dts|www)\.podtrac\.com/(?:pts/)?redirect\.[0-9a-z]{3,4}| # http://analytics.podtrac.com/how-to-measure
6409 flex\.acast\.com|
6410 pd(?:
6411 cn\.co| # https://podcorn.com/analytics-prefix/
6412 st\.fm # https://podsights.com/docs/
6413 )/e
6414 )/''', '', url)
ffcb8191
THD
6415
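# Illustrative call (the tracking prefix and file name are made up):
#   >>> clean_podcast_url('https://chtbl.com/track/12345/traffic.example.com/episode.mp3')
#   'https://traffic.example.com/episode.mp3'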
6416
6417_HEX_TABLE = '0123456789abcdef'
6418
6419
6420def random_uuidv4():
6421 return re.sub(r'[xy]', lambda x: _HEX_TABLE[random.randint(0, 15)], 'xxxxxxxx-xxxx-4xxx-yxxx-xxxxxxxxxxxx')
0202b52a 6422
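# Output varies per call; the shape is always 8-4-4-4-12 hex digits with a '4'
# at the start of the third group, e.g. (illustrative):
#   >>> random_uuidv4()
#   '9b1deb4d-3b7d-4bad-9bdd-2b0d7b3dcb6d'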
6423
6424def make_dir(path, to_screen=None):
6425 try:
6426 dn = os.path.dirname(path)
6427 if dn and not os.path.exists(dn):
6428 os.makedirs(dn)
6429 return True
6430 except (OSError, IOError) as err:
6431 if callable(to_screen):
6432 to_screen('unable to create directory ' + error_to_compat_str(err))
6433 return False
f74980cb 6434
6435
6436def get_executable_path():
c552ae88 6437 from zipimport import zipimporter
6438 if hasattr(sys, 'frozen'): # Running from PyInstaller
6439 path = os.path.dirname(sys.executable)
6440 elif isinstance(globals().get('__loader__'), zipimporter): # Running from ZIP
6441 path = os.path.join(os.path.dirname(__file__), '../..')
6442 else:
6443 path = os.path.join(os.path.dirname(__file__), '..')
f74980cb 6444 return os.path.abspath(path)
6445
6446
2f567473 6447def load_plugins(name, suffix, namespace):
3ae5e797 6448 classes = {}
f74980cb 6449 try:
019a94f7
ÁS
6450 plugins_spec = importlib.util.spec_from_file_location(
6451 name, os.path.join(get_executable_path(), 'ytdlp_plugins', name, '__init__.py'))
6452 plugins = importlib.util.module_from_spec(plugins_spec)
6453 sys.modules[plugins_spec.name] = plugins
6454 plugins_spec.loader.exec_module(plugins)
f74980cb 6455 for name in dir(plugins):
2f567473 6456 if name in namespace:
6457 continue
6458 if not name.endswith(suffix):
f74980cb 6459 continue
6460 klass = getattr(plugins, name)
3ae5e797 6461 classes[name] = namespace[name] = klass
019a94f7 6462 except FileNotFoundError:
f74980cb 6463 pass
f74980cb 6464 return classes
06167fbb 6465
6466
325ebc17 6467def traverse_obj(
352d63fd 6468 obj, *path_list, default=None, expected_type=None, get_all=True,
325ebc17 6469 casesense=True, is_user_input=False, traverse_string=False):
324ad820 6470 ''' Traverse nested list/dict/tuple
8f334380 6471 @param path_list A list of paths which are checked one by one.
6472 Each path is a list of keys where each key is a string,
2614f646 6473 a function, a tuple of strings or "...".
6474 When a fuction is given, it takes the key as argument and
6475 returns whether the key matches or not. When a tuple is given,
8f334380 6476 all the keys given in the tuple are traversed, and
6477 "..." traverses all the keys in the object
325ebc17 6478 @param default Default value to return
352d63fd 6479 @param expected_type Only accept final value of this type (Can also be any callable)
6480 @param get_all Return all the values obtained from a path or only the first one
324ad820 6481 @param casesense Whether to consider dictionary keys as case sensitive
6482 @param is_user_input Whether the keys are generated from user input. If True,
6483 strings are converted to int/slice if necessary
6484 @param traverse_string Whether to traverse inside strings. If True, any
6485 non-compatible object will also be converted into a string
8f334380 6486 # TODO: Write tests
324ad820 6487 '''
325ebc17 6488 if not casesense:
dbf5416a 6489 _lower = lambda k: (k.lower() if isinstance(k, str) else k)
8f334380 6490 path_list = (map(_lower, variadic(path)) for path in path_list)
6491
6492 def _traverse_obj(obj, path, _current_depth=0):
6493 nonlocal depth
6494 path = tuple(variadic(path))
6495 for i, key in enumerate(path):
582fad70 6496 if obj is None:
6497 return None
8f334380 6498 if isinstance(key, (list, tuple)):
6499 obj = [_traverse_obj(obj, sub_key, _current_depth) for sub_key in key]
6500 key = ...
6501 if key is ...:
6502 obj = (obj.values() if isinstance(obj, dict)
6503 else obj if isinstance(obj, (list, tuple, LazyList))
6504 else str(obj) if traverse_string else [])
6505 _current_depth += 1
6506 depth = max(depth, _current_depth)
6507 return [_traverse_obj(inner_obj, path[i + 1:], _current_depth) for inner_obj in obj]
2614f646 6508 elif callable(key):
6509 if isinstance(obj, (list, tuple, LazyList)):
6510 obj = enumerate(obj)
6511 elif isinstance(obj, dict):
6512 obj = obj.items()
6513 else:
6514 if not traverse_string:
6515 return None
6516 obj = str(obj)
6517 _current_depth += 1
6518 depth = max(depth, _current_depth)
6519 return [_traverse_obj(v, path[i + 1:], _current_depth) for k, v in obj if key(k)]
575e17a1 6520 elif isinstance(obj, dict) and not (is_user_input and key == ':'):
325ebc17 6521 obj = (obj.get(key) if casesense or (key in obj)
6522 else next((v for k, v in obj.items() if _lower(k) == key), None))
6523 else:
6524 if is_user_input:
6525 key = (int_or_none(key) if ':' not in key
6526 else slice(*map(int_or_none, key.split(':'))))
8f334380 6527 if key == slice(None):
575e17a1 6528 return _traverse_obj(obj, (..., *path[i + 1:]), _current_depth)
325ebc17 6529 if not isinstance(key, (int, slice)):
9fea350f 6530 return None
8f334380 6531 if not isinstance(obj, (list, tuple, LazyList)):
325ebc17 6532 if not traverse_string:
6533 return None
6534 obj = str(obj)
6535 try:
6536 obj = obj[key]
6537 except IndexError:
324ad820 6538 return None
325ebc17 6539 return obj
6540
352d63fd 6541 if isinstance(expected_type, type):
6542 type_test = lambda val: val if isinstance(val, expected_type) else None
6543 elif expected_type is not None:
6544 type_test = expected_type
6545 else:
6546 type_test = lambda val: val
6547
8f334380 6548 for path in path_list:
6549 depth = 0
6550 val = _traverse_obj(obj, path)
325ebc17 6551 if val is not None:
8f334380 6552 if depth:
6553 for _ in range(depth - 1):
6586bca9 6554 val = itertools.chain.from_iterable(v for v in val if v is not None)
352d63fd 6555 val = [v for v in map(type_test, val) if v is not None]
8f334380 6556 if val:
352d63fd 6557 return val if get_all else val[0]
6558 else:
6559 val = type_test(val)
6560 if val is not None:
8f334380 6561 return val
325ebc17 6562 return default
324ad820 6563
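# Usage sketch (illustrative data):
#   >>> data = {'formats': [{'url': 'https://a.example'}, {'height': 720}]}
#   >>> traverse_obj(data, ('formats', 0, 'url'))
#   'https://a.example'
#   >>> traverse_obj(data, ('formats', ..., 'height'))   # '...' visits every entry
#   [720]
#   >>> traverse_obj(data, ('formats', 1, 'url'), default='none')
#   'none'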
6564
ee8dd27a 6565# Deprecated
324ad820 6566def traverse_dict(dictn, keys, casesense=True):
ee8dd27a 6567 write_string('DeprecationWarning: yt_dlp.utils.traverse_dict is deprecated '
6568 'and may be removed in a future version. Use yt_dlp.utils.traverse_obj instead')
6569 return traverse_obj(dictn, keys, casesense=casesense, is_user_input=True, traverse_string=True)
6606817a 6570
6571
c634ad2a 6572def variadic(x, allowed_types=(str, bytes)):
cb89cfc1 6573 return x if isinstance(x, collections.abc.Iterable) and not isinstance(x, allowed_types) else (x,)
bd50a52b
THD
6574
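# Wraps scalars in a tuple but leaves non-string iterables untouched:
#   >>> variadic('abc')
#   ('abc',)
#   >>> variadic(['a', 'b'])
#   ['a', 'b']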
6575
49fa4d9a
N
6576# create a JSON Web Signature (jws) with HS256 algorithm
6577# the resulting format is in JWS Compact Serialization
6578# implemented following JWT https://www.rfc-editor.org/rfc/rfc7519.html
6579# implemented following JWS https://www.rfc-editor.org/rfc/rfc7515.html
6580def jwt_encode_hs256(payload_data, key, headers={}):
6581 header_data = {
6582 'alg': 'HS256',
6583 'typ': 'JWT',
6584 }
6585 if headers:
6586 header_data.update(headers)
6587 header_b64 = base64.b64encode(json.dumps(header_data).encode('utf-8'))
6588 payload_b64 = base64.b64encode(json.dumps(payload_data).encode('utf-8'))
6589 h = hmac.new(key.encode('utf-8'), header_b64 + b'.' + payload_b64, hashlib.sha256)
6590 signature_b64 = base64.b64encode(h.digest())
6591 token = header_b64 + b'.' + payload_b64 + b'.' + signature_b64
6592 return token
819e0531 6593
6594
16b0d7e6 6595 # Can be extended in the future to verify the signature and parse the header, returning the algorithm used if it's not HS256
6596def jwt_decode_hs256(jwt):
6597 header_b64, payload_b64, signature_b64 = jwt.split('.')
6598 payload_data = json.loads(base64.urlsafe_b64decode(payload_b64))
6599 return payload_data
6600
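# Round-trip sketch ('secret' is an arbitrary illustrative key):
#   >>> token = jwt_encode_hs256({'uid': 1}, 'secret')
#   >>> jwt_decode_hs256(token.decode('utf-8'))
#   {'uid': 1}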
6601
819e0531 6602def supports_terminal_sequences(stream):
6603 if compat_os_name == 'nt':
e3c7d495 6604 from .compat import WINDOWS_VT_MODE # Must be imported locally
6605 if not WINDOWS_VT_MODE or get_windows_version() < (10, 0, 10586):
819e0531 6606 return False
6607 elif not os.getenv('TERM'):
6608 return False
6609 try:
6610 return stream.isatty()
6611 except BaseException:
6612 return False
6613
6614
ec11a9f4 6615_terminal_sequences_re = re.compile('\033\\[[^m]+m')
6616
6617
6618def remove_terminal_sequences(string):
6619 return _terminal_sequences_re.sub('', string)
6620
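# Strips ANSI color/SGR escape sequences, e.g.:
#   >>> remove_terminal_sequences('\033[0;31mERROR:\033[0m oops')
#   'ERROR: oops'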
6621
6622def number_of_digits(number):
6623 return len('%d' % number)
34921b43 6624
6625
6626def join_nonempty(*values, delim='-', from_dict=None):
6627 if from_dict is not None:
c586f9e8 6628 values = map(from_dict.get, values)
34921b43 6629 return delim.join(map(str, filter(None, values)))
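# Illustrative call (None and empty values are skipped):
#   >>> join_nonempty('mp4', None, '', '1080p', delim='-')
#   'mp4-1080p'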