#!/usr/bin/env python3
# coding: utf-8

from __future__ import unicode_literals

import base64
import binascii
import calendar
import codecs
import collections
import contextlib
import ctypes
import datetime
import email.utils
import email.header
import errno
import functools
import gzip
import hashlib
import hmac
import importlib.util
import io
import itertools
import json
import locale
import math
import operator
import os
import platform
import random
import re
import socket
import ssl
import subprocess
import sys
import tempfile
import time
import traceback
import xml.etree.ElementTree
import zlib
import mimetypes

from .compat import (
    compat_HTMLParseError,
    compat_HTMLParser,
    compat_HTTPError,
    compat_basestring,
    compat_chr,
    compat_cookiejar,
    compat_ctypes_WINFUNCTYPE,
    compat_etree_fromstring,
    compat_expanduser,
    compat_html_entities,
    compat_html_entities_html5,
    compat_http_client,
    compat_integer_types,
    compat_numeric_types,
    compat_kwargs,
    compat_os_name,
    compat_parse_qs,
    compat_shlex_split,
    compat_shlex_quote,
    compat_str,
    compat_struct_pack,
    compat_struct_unpack,
    compat_urllib_error,
    compat_urllib_parse,
    compat_urllib_parse_urlencode,
    compat_urllib_parse_urlparse,
    compat_urllib_parse_urlunparse,
    compat_urllib_parse_quote,
    compat_urllib_parse_quote_plus,
    compat_urllib_parse_unquote_plus,
    compat_urllib_request,
    compat_urlparse,
    compat_xpath,
)

from .socks import (
    ProxyType,
    sockssocket,
)


def register_socks_protocols():
    # "Register" SOCKS protocols
    # In Python < 2.6.5, urlsplit() suffers from bug https://bugs.python.org/issue7904
    # URLs with protocols not in urlparse.uses_netloc are not handled correctly
    for scheme in ('socks', 'socks4', 'socks4a', 'socks5'):
        if scheme not in compat_urlparse.uses_netloc:
            compat_urlparse.uses_netloc.append(scheme)


# This is not clearly defined otherwise
compiled_regex_type = type(re.compile(''))


def random_user_agent():
    _USER_AGENT_TPL = 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/%s Safari/537.36'
    _CHROME_VERSIONS = (
        '74.0.3729.129',
102 '76.0.3780.3',
103 '76.0.3780.2',
104 '74.0.3729.128',
105 '76.0.3780.1',
106 '76.0.3780.0',
107 '75.0.3770.15',
108 '74.0.3729.127',
109 '74.0.3729.126',
110 '76.0.3779.1',
111 '76.0.3779.0',
112 '75.0.3770.14',
113 '74.0.3729.125',
114 '76.0.3778.1',
115 '76.0.3778.0',
116 '75.0.3770.13',
117 '74.0.3729.124',
118 '74.0.3729.123',
119 '73.0.3683.121',
120 '76.0.3777.1',
121 '76.0.3777.0',
122 '75.0.3770.12',
123 '74.0.3729.122',
124 '76.0.3776.4',
125 '75.0.3770.11',
126 '74.0.3729.121',
127 '76.0.3776.3',
128 '76.0.3776.2',
129 '73.0.3683.120',
130 '74.0.3729.120',
131 '74.0.3729.119',
132 '74.0.3729.118',
133 '76.0.3776.1',
134 '76.0.3776.0',
135 '76.0.3775.5',
136 '75.0.3770.10',
137 '74.0.3729.117',
138 '76.0.3775.4',
139 '76.0.3775.3',
140 '74.0.3729.116',
141 '75.0.3770.9',
142 '76.0.3775.2',
143 '76.0.3775.1',
144 '76.0.3775.0',
145 '75.0.3770.8',
146 '74.0.3729.115',
147 '74.0.3729.114',
148 '76.0.3774.1',
149 '76.0.3774.0',
150 '75.0.3770.7',
151 '74.0.3729.113',
152 '74.0.3729.112',
153 '74.0.3729.111',
154 '76.0.3773.1',
155 '76.0.3773.0',
156 '75.0.3770.6',
157 '74.0.3729.110',
158 '74.0.3729.109',
159 '76.0.3772.1',
160 '76.0.3772.0',
161 '75.0.3770.5',
162 '74.0.3729.108',
163 '74.0.3729.107',
164 '76.0.3771.1',
165 '76.0.3771.0',
166 '75.0.3770.4',
167 '74.0.3729.106',
168 '74.0.3729.105',
169 '75.0.3770.3',
170 '74.0.3729.104',
171 '74.0.3729.103',
172 '74.0.3729.102',
173 '75.0.3770.2',
174 '74.0.3729.101',
175 '75.0.3770.1',
176 '75.0.3770.0',
177 '74.0.3729.100',
178 '75.0.3769.5',
179 '75.0.3769.4',
180 '74.0.3729.99',
181 '75.0.3769.3',
182 '75.0.3769.2',
183 '75.0.3768.6',
184 '74.0.3729.98',
185 '75.0.3769.1',
186 '75.0.3769.0',
187 '74.0.3729.97',
188 '73.0.3683.119',
189 '73.0.3683.118',
190 '74.0.3729.96',
191 '75.0.3768.5',
192 '75.0.3768.4',
193 '75.0.3768.3',
194 '75.0.3768.2',
195 '74.0.3729.95',
196 '74.0.3729.94',
197 '75.0.3768.1',
198 '75.0.3768.0',
199 '74.0.3729.93',
200 '74.0.3729.92',
201 '73.0.3683.117',
202 '74.0.3729.91',
203 '75.0.3766.3',
204 '74.0.3729.90',
205 '75.0.3767.2',
206 '75.0.3767.1',
207 '75.0.3767.0',
208 '74.0.3729.89',
209 '73.0.3683.116',
210 '75.0.3766.2',
211 '74.0.3729.88',
212 '75.0.3766.1',
213 '75.0.3766.0',
214 '74.0.3729.87',
215 '73.0.3683.115',
216 '74.0.3729.86',
217 '75.0.3765.1',
218 '75.0.3765.0',
219 '74.0.3729.85',
220 '73.0.3683.114',
221 '74.0.3729.84',
222 '75.0.3764.1',
223 '75.0.3764.0',
224 '74.0.3729.83',
225 '73.0.3683.113',
226 '75.0.3763.2',
227 '75.0.3761.4',
228 '74.0.3729.82',
229 '75.0.3763.1',
230 '75.0.3763.0',
231 '74.0.3729.81',
232 '73.0.3683.112',
233 '75.0.3762.1',
234 '75.0.3762.0',
235 '74.0.3729.80',
236 '75.0.3761.3',
237 '74.0.3729.79',
238 '73.0.3683.111',
239 '75.0.3761.2',
240 '74.0.3729.78',
241 '74.0.3729.77',
242 '75.0.3761.1',
243 '75.0.3761.0',
244 '73.0.3683.110',
245 '74.0.3729.76',
246 '74.0.3729.75',
247 '75.0.3760.0',
248 '74.0.3729.74',
249 '75.0.3759.8',
250 '75.0.3759.7',
251 '75.0.3759.6',
252 '74.0.3729.73',
253 '75.0.3759.5',
254 '74.0.3729.72',
255 '73.0.3683.109',
256 '75.0.3759.4',
257 '75.0.3759.3',
258 '74.0.3729.71',
259 '75.0.3759.2',
260 '74.0.3729.70',
261 '73.0.3683.108',
262 '74.0.3729.69',
263 '75.0.3759.1',
264 '75.0.3759.0',
265 '74.0.3729.68',
266 '73.0.3683.107',
267 '74.0.3729.67',
268 '75.0.3758.1',
269 '75.0.3758.0',
270 '74.0.3729.66',
271 '73.0.3683.106',
272 '74.0.3729.65',
273 '75.0.3757.1',
274 '75.0.3757.0',
275 '74.0.3729.64',
276 '73.0.3683.105',
277 '74.0.3729.63',
278 '75.0.3756.1',
279 '75.0.3756.0',
280 '74.0.3729.62',
281 '73.0.3683.104',
282 '75.0.3755.3',
283 '75.0.3755.2',
284 '73.0.3683.103',
285 '75.0.3755.1',
286 '75.0.3755.0',
287 '74.0.3729.61',
288 '73.0.3683.102',
289 '74.0.3729.60',
290 '75.0.3754.2',
291 '74.0.3729.59',
292 '75.0.3753.4',
293 '74.0.3729.58',
294 '75.0.3754.1',
295 '75.0.3754.0',
296 '74.0.3729.57',
297 '73.0.3683.101',
298 '75.0.3753.3',
299 '75.0.3752.2',
300 '75.0.3753.2',
301 '74.0.3729.56',
302 '75.0.3753.1',
303 '75.0.3753.0',
304 '74.0.3729.55',
305 '73.0.3683.100',
306 '74.0.3729.54',
307 '75.0.3752.1',
308 '75.0.3752.0',
309 '74.0.3729.53',
310 '73.0.3683.99',
311 '74.0.3729.52',
312 '75.0.3751.1',
313 '75.0.3751.0',
314 '74.0.3729.51',
315 '73.0.3683.98',
316 '74.0.3729.50',
317 '75.0.3750.0',
318 '74.0.3729.49',
319 '74.0.3729.48',
320 '74.0.3729.47',
321 '75.0.3749.3',
322 '74.0.3729.46',
323 '73.0.3683.97',
324 '75.0.3749.2',
325 '74.0.3729.45',
326 '75.0.3749.1',
327 '75.0.3749.0',
328 '74.0.3729.44',
329 '73.0.3683.96',
330 '74.0.3729.43',
331 '74.0.3729.42',
332 '75.0.3748.1',
333 '75.0.3748.0',
334 '74.0.3729.41',
335 '75.0.3747.1',
336 '73.0.3683.95',
337 '75.0.3746.4',
338 '74.0.3729.40',
339 '74.0.3729.39',
340 '75.0.3747.0',
341 '75.0.3746.3',
342 '75.0.3746.2',
343 '74.0.3729.38',
344 '75.0.3746.1',
345 '75.0.3746.0',
346 '74.0.3729.37',
347 '73.0.3683.94',
348 '75.0.3745.5',
349 '75.0.3745.4',
350 '75.0.3745.3',
351 '75.0.3745.2',
352 '74.0.3729.36',
353 '75.0.3745.1',
354 '75.0.3745.0',
355 '75.0.3744.2',
356 '74.0.3729.35',
357 '73.0.3683.93',
358 '74.0.3729.34',
359 '75.0.3744.1',
360 '75.0.3744.0',
361 '74.0.3729.33',
362 '73.0.3683.92',
363 '74.0.3729.32',
364 '74.0.3729.31',
365 '73.0.3683.91',
366 '75.0.3741.2',
367 '75.0.3740.5',
368 '74.0.3729.30',
369 '75.0.3741.1',
370 '75.0.3741.0',
371 '74.0.3729.29',
372 '75.0.3740.4',
373 '73.0.3683.90',
374 '74.0.3729.28',
375 '75.0.3740.3',
376 '73.0.3683.89',
377 '75.0.3740.2',
378 '74.0.3729.27',
379 '75.0.3740.1',
380 '75.0.3740.0',
381 '74.0.3729.26',
382 '73.0.3683.88',
383 '73.0.3683.87',
384 '74.0.3729.25',
385 '75.0.3739.1',
386 '75.0.3739.0',
387 '73.0.3683.86',
388 '74.0.3729.24',
389 '73.0.3683.85',
390 '75.0.3738.4',
391 '75.0.3738.3',
392 '75.0.3738.2',
393 '75.0.3738.1',
394 '75.0.3738.0',
395 '74.0.3729.23',
396 '73.0.3683.84',
397 '74.0.3729.22',
398 '74.0.3729.21',
399 '75.0.3737.1',
400 '75.0.3737.0',
401 '74.0.3729.20',
402 '73.0.3683.83',
403 '74.0.3729.19',
404 '75.0.3736.1',
405 '75.0.3736.0',
406 '74.0.3729.18',
407 '73.0.3683.82',
408 '74.0.3729.17',
409 '75.0.3735.1',
410 '75.0.3735.0',
411 '74.0.3729.16',
412 '73.0.3683.81',
413 '75.0.3734.1',
414 '75.0.3734.0',
415 '74.0.3729.15',
416 '73.0.3683.80',
417 '74.0.3729.14',
418 '75.0.3733.1',
419 '75.0.3733.0',
420 '75.0.3732.1',
421 '74.0.3729.13',
422 '74.0.3729.12',
423 '73.0.3683.79',
424 '74.0.3729.11',
425 '75.0.3732.0',
426 '74.0.3729.10',
427 '73.0.3683.78',
428 '74.0.3729.9',
429 '74.0.3729.8',
430 '74.0.3729.7',
431 '75.0.3731.3',
432 '75.0.3731.2',
433 '75.0.3731.0',
434 '74.0.3729.6',
435 '73.0.3683.77',
436 '73.0.3683.76',
437 '75.0.3730.5',
438 '75.0.3730.4',
439 '73.0.3683.75',
440 '74.0.3729.5',
441 '73.0.3683.74',
442 '75.0.3730.3',
443 '75.0.3730.2',
444 '74.0.3729.4',
445 '73.0.3683.73',
446 '73.0.3683.72',
447 '75.0.3730.1',
448 '75.0.3730.0',
449 '74.0.3729.3',
450 '73.0.3683.71',
451 '74.0.3729.2',
452 '73.0.3683.70',
453 '74.0.3729.1',
454 '74.0.3729.0',
455 '74.0.3726.4',
456 '73.0.3683.69',
457 '74.0.3726.3',
458 '74.0.3728.0',
459 '74.0.3726.2',
460 '73.0.3683.68',
461 '74.0.3726.1',
462 '74.0.3726.0',
463 '74.0.3725.4',
464 '73.0.3683.67',
465 '73.0.3683.66',
466 '74.0.3725.3',
467 '74.0.3725.2',
468 '74.0.3725.1',
469 '74.0.3724.8',
470 '74.0.3725.0',
471 '73.0.3683.65',
472 '74.0.3724.7',
473 '74.0.3724.6',
474 '74.0.3724.5',
475 '74.0.3724.4',
476 '74.0.3724.3',
477 '74.0.3724.2',
478 '74.0.3724.1',
479 '74.0.3724.0',
480 '73.0.3683.64',
481 '74.0.3723.1',
482 '74.0.3723.0',
483 '73.0.3683.63',
484 '74.0.3722.1',
485 '74.0.3722.0',
486 '73.0.3683.62',
487 '74.0.3718.9',
488 '74.0.3702.3',
489 '74.0.3721.3',
490 '74.0.3721.2',
491 '74.0.3721.1',
492 '74.0.3721.0',
493 '74.0.3720.6',
494 '73.0.3683.61',
495 '72.0.3626.122',
496 '73.0.3683.60',
497 '74.0.3720.5',
498 '72.0.3626.121',
499 '74.0.3718.8',
500 '74.0.3720.4',
501 '74.0.3720.3',
502 '74.0.3718.7',
503 '74.0.3720.2',
504 '74.0.3720.1',
505 '74.0.3720.0',
506 '74.0.3718.6',
507 '74.0.3719.5',
508 '73.0.3683.59',
509 '74.0.3718.5',
510 '74.0.3718.4',
511 '74.0.3719.4',
512 '74.0.3719.3',
513 '74.0.3719.2',
514 '74.0.3719.1',
515 '73.0.3683.58',
516 '74.0.3719.0',
517 '73.0.3683.57',
518 '73.0.3683.56',
519 '74.0.3718.3',
520 '73.0.3683.55',
521 '74.0.3718.2',
522 '74.0.3718.1',
523 '74.0.3718.0',
524 '73.0.3683.54',
525 '74.0.3717.2',
526 '73.0.3683.53',
527 '74.0.3717.1',
528 '74.0.3717.0',
529 '73.0.3683.52',
530 '74.0.3716.1',
531 '74.0.3716.0',
532 '73.0.3683.51',
533 '74.0.3715.1',
534 '74.0.3715.0',
535 '73.0.3683.50',
536 '74.0.3711.2',
537 '74.0.3714.2',
538 '74.0.3713.3',
539 '74.0.3714.1',
540 '74.0.3714.0',
541 '73.0.3683.49',
542 '74.0.3713.1',
543 '74.0.3713.0',
544 '72.0.3626.120',
545 '73.0.3683.48',
546 '74.0.3712.2',
547 '74.0.3712.1',
548 '74.0.3712.0',
549 '73.0.3683.47',
550 '72.0.3626.119',
551 '73.0.3683.46',
552 '74.0.3710.2',
553 '72.0.3626.118',
554 '74.0.3711.1',
555 '74.0.3711.0',
556 '73.0.3683.45',
557 '72.0.3626.117',
558 '74.0.3710.1',
559 '74.0.3710.0',
560 '73.0.3683.44',
561 '72.0.3626.116',
562 '74.0.3709.1',
563 '74.0.3709.0',
564 '74.0.3704.9',
565 '73.0.3683.43',
566 '72.0.3626.115',
567 '74.0.3704.8',
568 '74.0.3704.7',
569 '74.0.3708.0',
570 '74.0.3706.7',
571 '74.0.3704.6',
572 '73.0.3683.42',
573 '72.0.3626.114',
574 '74.0.3706.6',
575 '72.0.3626.113',
576 '74.0.3704.5',
577 '74.0.3706.5',
578 '74.0.3706.4',
579 '74.0.3706.3',
580 '74.0.3706.2',
581 '74.0.3706.1',
582 '74.0.3706.0',
583 '73.0.3683.41',
584 '72.0.3626.112',
585 '74.0.3705.1',
586 '74.0.3705.0',
587 '73.0.3683.40',
588 '72.0.3626.111',
589 '73.0.3683.39',
590 '74.0.3704.4',
591 '73.0.3683.38',
592 '74.0.3704.3',
593 '74.0.3704.2',
594 '74.0.3704.1',
595 '74.0.3704.0',
596 '73.0.3683.37',
597 '72.0.3626.110',
598 '72.0.3626.109',
599 '74.0.3703.3',
600 '74.0.3703.2',
601 '73.0.3683.36',
602 '74.0.3703.1',
603 '74.0.3703.0',
604 '73.0.3683.35',
605 '72.0.3626.108',
606 '74.0.3702.2',
607 '74.0.3699.3',
608 '74.0.3702.1',
609 '74.0.3702.0',
610 '73.0.3683.34',
611 '72.0.3626.107',
612 '73.0.3683.33',
613 '74.0.3701.1',
614 '74.0.3701.0',
615 '73.0.3683.32',
616 '73.0.3683.31',
617 '72.0.3626.105',
618 '74.0.3700.1',
619 '74.0.3700.0',
620 '73.0.3683.29',
621 '72.0.3626.103',
622 '74.0.3699.2',
623 '74.0.3699.1',
624 '74.0.3699.0',
625 '73.0.3683.28',
626 '72.0.3626.102',
627 '73.0.3683.27',
628 '73.0.3683.26',
629 '74.0.3698.0',
630 '74.0.3696.2',
631 '72.0.3626.101',
632 '73.0.3683.25',
633 '74.0.3696.1',
634 '74.0.3696.0',
635 '74.0.3694.8',
636 '72.0.3626.100',
637 '74.0.3694.7',
638 '74.0.3694.6',
639 '74.0.3694.5',
640 '74.0.3694.4',
641 '72.0.3626.99',
642 '72.0.3626.98',
643 '74.0.3694.3',
644 '73.0.3683.24',
645 '72.0.3626.97',
646 '72.0.3626.96',
647 '72.0.3626.95',
648 '73.0.3683.23',
649 '72.0.3626.94',
650 '73.0.3683.22',
651 '73.0.3683.21',
652 '72.0.3626.93',
653 '74.0.3694.2',
654 '72.0.3626.92',
655 '74.0.3694.1',
656 '74.0.3694.0',
657 '74.0.3693.6',
658 '73.0.3683.20',
659 '72.0.3626.91',
660 '74.0.3693.5',
661 '74.0.3693.4',
662 '74.0.3693.3',
663 '74.0.3693.2',
664 '73.0.3683.19',
665 '74.0.3693.1',
666 '74.0.3693.0',
667 '73.0.3683.18',
668 '72.0.3626.90',
669 '74.0.3692.1',
670 '74.0.3692.0',
671 '73.0.3683.17',
672 '72.0.3626.89',
673 '74.0.3687.3',
674 '74.0.3691.1',
675 '74.0.3691.0',
676 '73.0.3683.16',
677 '72.0.3626.88',
678 '72.0.3626.87',
679 '73.0.3683.15',
680 '74.0.3690.1',
681 '74.0.3690.0',
682 '73.0.3683.14',
683 '72.0.3626.86',
684 '73.0.3683.13',
685 '73.0.3683.12',
686 '74.0.3689.1',
687 '74.0.3689.0',
688 '73.0.3683.11',
689 '72.0.3626.85',
690 '73.0.3683.10',
691 '72.0.3626.84',
692 '73.0.3683.9',
693 '74.0.3688.1',
694 '74.0.3688.0',
695 '73.0.3683.8',
696 '72.0.3626.83',
697 '74.0.3687.2',
698 '74.0.3687.1',
699 '74.0.3687.0',
700 '73.0.3683.7',
701 '72.0.3626.82',
702 '74.0.3686.4',
703 '72.0.3626.81',
704 '74.0.3686.3',
705 '74.0.3686.2',
706 '74.0.3686.1',
707 '74.0.3686.0',
708 '73.0.3683.6',
709 '72.0.3626.80',
710 '74.0.3685.1',
711 '74.0.3685.0',
712 '73.0.3683.5',
713 '72.0.3626.79',
714 '74.0.3684.1',
715 '74.0.3684.0',
716 '73.0.3683.4',
717 '72.0.3626.78',
718 '72.0.3626.77',
719 '73.0.3683.3',
720 '73.0.3683.2',
721 '72.0.3626.76',
722 '73.0.3683.1',
723 '73.0.3683.0',
724 '72.0.3626.75',
725 '71.0.3578.141',
726 '73.0.3682.1',
727 '73.0.3682.0',
728 '72.0.3626.74',
729 '71.0.3578.140',
730 '73.0.3681.4',
731 '73.0.3681.3',
732 '73.0.3681.2',
733 '73.0.3681.1',
734 '73.0.3681.0',
735 '72.0.3626.73',
736 '71.0.3578.139',
737 '72.0.3626.72',
738 '72.0.3626.71',
739 '73.0.3680.1',
740 '73.0.3680.0',
741 '72.0.3626.70',
742 '71.0.3578.138',
743 '73.0.3678.2',
744 '73.0.3679.1',
745 '73.0.3679.0',
746 '72.0.3626.69',
747 '71.0.3578.137',
748 '73.0.3678.1',
749 '73.0.3678.0',
750 '71.0.3578.136',
751 '73.0.3677.1',
752 '73.0.3677.0',
753 '72.0.3626.68',
754 '72.0.3626.67',
755 '71.0.3578.135',
756 '73.0.3676.1',
757 '73.0.3676.0',
758 '73.0.3674.2',
759 '72.0.3626.66',
760 '71.0.3578.134',
761 '73.0.3674.1',
762 '73.0.3674.0',
763 '72.0.3626.65',
764 '71.0.3578.133',
765 '73.0.3673.2',
766 '73.0.3673.1',
767 '73.0.3673.0',
768 '72.0.3626.64',
769 '71.0.3578.132',
770 '72.0.3626.63',
771 '72.0.3626.62',
772 '72.0.3626.61',
773 '72.0.3626.60',
774 '73.0.3672.1',
775 '73.0.3672.0',
776 '72.0.3626.59',
777 '71.0.3578.131',
778 '73.0.3671.3',
779 '73.0.3671.2',
780 '73.0.3671.1',
781 '73.0.3671.0',
782 '72.0.3626.58',
783 '71.0.3578.130',
784 '73.0.3670.1',
785 '73.0.3670.0',
786 '72.0.3626.57',
787 '71.0.3578.129',
788 '73.0.3669.1',
789 '73.0.3669.0',
790 '72.0.3626.56',
791 '71.0.3578.128',
792 '73.0.3668.2',
793 '73.0.3668.1',
794 '73.0.3668.0',
795 '72.0.3626.55',
796 '71.0.3578.127',
797 '73.0.3667.2',
798 '73.0.3667.1',
799 '73.0.3667.0',
800 '72.0.3626.54',
801 '71.0.3578.126',
802 '73.0.3666.1',
803 '73.0.3666.0',
804 '72.0.3626.53',
805 '71.0.3578.125',
806 '73.0.3665.4',
807 '73.0.3665.3',
808 '72.0.3626.52',
809 '73.0.3665.2',
810 '73.0.3664.4',
811 '73.0.3665.1',
812 '73.0.3665.0',
813 '72.0.3626.51',
814 '71.0.3578.124',
815 '72.0.3626.50',
816 '73.0.3664.3',
817 '73.0.3664.2',
818 '73.0.3664.1',
819 '73.0.3664.0',
820 '73.0.3663.2',
821 '72.0.3626.49',
822 '71.0.3578.123',
823 '73.0.3663.1',
824 '73.0.3663.0',
825 '72.0.3626.48',
826 '71.0.3578.122',
827 '73.0.3662.1',
828 '73.0.3662.0',
829 '72.0.3626.47',
830 '71.0.3578.121',
831 '73.0.3661.1',
832 '72.0.3626.46',
833 '73.0.3661.0',
834 '72.0.3626.45',
835 '71.0.3578.120',
836 '73.0.3660.2',
837 '73.0.3660.1',
838 '73.0.3660.0',
839 '72.0.3626.44',
840 '71.0.3578.119',
841 '73.0.3659.1',
842 '73.0.3659.0',
843 '72.0.3626.43',
844 '71.0.3578.118',
845 '73.0.3658.1',
846 '73.0.3658.0',
847 '72.0.3626.42',
848 '71.0.3578.117',
849 '73.0.3657.1',
850 '73.0.3657.0',
851 '72.0.3626.41',
852 '71.0.3578.116',
853 '73.0.3656.1',
854 '73.0.3656.0',
855 '72.0.3626.40',
856 '71.0.3578.115',
857 '73.0.3655.1',
858 '73.0.3655.0',
859 '72.0.3626.39',
860 '71.0.3578.114',
861 '73.0.3654.1',
862 '73.0.3654.0',
863 '72.0.3626.38',
864 '71.0.3578.113',
865 '73.0.3653.1',
866 '73.0.3653.0',
867 '72.0.3626.37',
868 '71.0.3578.112',
869 '73.0.3652.1',
870 '73.0.3652.0',
871 '72.0.3626.36',
872 '71.0.3578.111',
873 '73.0.3651.1',
874 '73.0.3651.0',
875 '72.0.3626.35',
876 '71.0.3578.110',
877 '73.0.3650.1',
878 '73.0.3650.0',
879 '72.0.3626.34',
880 '71.0.3578.109',
881 '73.0.3649.1',
882 '73.0.3649.0',
883 '72.0.3626.33',
884 '71.0.3578.108',
885 '73.0.3648.2',
886 '73.0.3648.1',
887 '73.0.3648.0',
888 '72.0.3626.32',
889 '71.0.3578.107',
890 '73.0.3647.2',
891 '73.0.3647.1',
892 '73.0.3647.0',
893 '72.0.3626.31',
894 '71.0.3578.106',
895 '73.0.3635.3',
896 '73.0.3646.2',
897 '73.0.3646.1',
898 '73.0.3646.0',
899 '72.0.3626.30',
900 '71.0.3578.105',
901 '72.0.3626.29',
902 '73.0.3645.2',
903 '73.0.3645.1',
904 '73.0.3645.0',
905 '72.0.3626.28',
906 '71.0.3578.104',
907 '72.0.3626.27',
908 '72.0.3626.26',
909 '72.0.3626.25',
910 '72.0.3626.24',
911 '73.0.3644.0',
912 '73.0.3643.2',
913 '72.0.3626.23',
914 '71.0.3578.103',
915 '73.0.3643.1',
916 '73.0.3643.0',
917 '72.0.3626.22',
918 '71.0.3578.102',
919 '73.0.3642.1',
920 '73.0.3642.0',
921 '72.0.3626.21',
922 '71.0.3578.101',
923 '73.0.3641.1',
924 '73.0.3641.0',
925 '72.0.3626.20',
926 '71.0.3578.100',
927 '72.0.3626.19',
928 '73.0.3640.1',
929 '73.0.3640.0',
930 '72.0.3626.18',
931 '73.0.3639.1',
932 '71.0.3578.99',
933 '73.0.3639.0',
934 '72.0.3626.17',
935 '73.0.3638.2',
936 '72.0.3626.16',
937 '73.0.3638.1',
938 '73.0.3638.0',
939 '72.0.3626.15',
940 '71.0.3578.98',
941 '73.0.3635.2',
942 '71.0.3578.97',
943 '73.0.3637.1',
944 '73.0.3637.0',
945 '72.0.3626.14',
946 '71.0.3578.96',
947 '71.0.3578.95',
948 '72.0.3626.13',
949 '71.0.3578.94',
950 '73.0.3636.2',
951 '71.0.3578.93',
952 '73.0.3636.1',
953 '73.0.3636.0',
954 '72.0.3626.12',
955 '71.0.3578.92',
956 '73.0.3635.1',
957 '73.0.3635.0',
958 '72.0.3626.11',
959 '71.0.3578.91',
960 '73.0.3634.2',
961 '73.0.3634.1',
962 '73.0.3634.0',
963 '72.0.3626.10',
964 '71.0.3578.90',
965 '71.0.3578.89',
966 '73.0.3633.2',
967 '73.0.3633.1',
968 '73.0.3633.0',
969 '72.0.3610.4',
970 '72.0.3626.9',
971 '71.0.3578.88',
972 '73.0.3632.5',
973 '73.0.3632.4',
974 '73.0.3632.3',
975 '73.0.3632.2',
976 '73.0.3632.1',
977 '73.0.3632.0',
978 '72.0.3626.8',
979 '71.0.3578.87',
980 '73.0.3631.2',
981 '73.0.3631.1',
982 '73.0.3631.0',
983 '72.0.3626.7',
984 '71.0.3578.86',
985 '72.0.3626.6',
986 '73.0.3630.1',
987 '73.0.3630.0',
988 '72.0.3626.5',
989 '71.0.3578.85',
990 '72.0.3626.4',
991 '73.0.3628.3',
992 '73.0.3628.2',
993 '73.0.3629.1',
994 '73.0.3629.0',
995 '72.0.3626.3',
996 '71.0.3578.84',
997 '73.0.3628.1',
998 '73.0.3628.0',
999 '71.0.3578.83',
1000 '73.0.3627.1',
1001 '73.0.3627.0',
1002 '72.0.3626.2',
1003 '71.0.3578.82',
1004 '71.0.3578.81',
1005 '71.0.3578.80',
1006 '72.0.3626.1',
1007 '72.0.3626.0',
1008 '71.0.3578.79',
1009 '70.0.3538.124',
1010 '71.0.3578.78',
1011 '72.0.3623.4',
1012 '72.0.3625.2',
1013 '72.0.3625.1',
1014 '72.0.3625.0',
1015 '71.0.3578.77',
1016 '70.0.3538.123',
1017 '72.0.3624.4',
1018 '72.0.3624.3',
1019 '72.0.3624.2',
1020 '71.0.3578.76',
1021 '72.0.3624.1',
1022 '72.0.3624.0',
1023 '72.0.3623.3',
1024 '71.0.3578.75',
1025 '70.0.3538.122',
1026 '71.0.3578.74',
1027 '72.0.3623.2',
1028 '72.0.3610.3',
1029 '72.0.3623.1',
1030 '72.0.3623.0',
1031 '72.0.3622.3',
1032 '72.0.3622.2',
1033 '71.0.3578.73',
1034 '70.0.3538.121',
1035 '72.0.3622.1',
1036 '72.0.3622.0',
1037 '71.0.3578.72',
1038 '70.0.3538.120',
1039 '72.0.3621.1',
1040 '72.0.3621.0',
1041 '71.0.3578.71',
1042 '70.0.3538.119',
1043 '72.0.3620.1',
1044 '72.0.3620.0',
1045 '71.0.3578.70',
1046 '70.0.3538.118',
1047 '71.0.3578.69',
1048 '72.0.3619.1',
1049 '72.0.3619.0',
1050 '71.0.3578.68',
1051 '70.0.3538.117',
1052 '71.0.3578.67',
1053 '72.0.3618.1',
1054 '72.0.3618.0',
1055 '71.0.3578.66',
1056 '70.0.3538.116',
1057 '72.0.3617.1',
1058 '72.0.3617.0',
1059 '71.0.3578.65',
1060 '70.0.3538.115',
1061 '72.0.3602.3',
1062 '71.0.3578.64',
1063 '72.0.3616.1',
1064 '72.0.3616.0',
1065 '71.0.3578.63',
1066 '70.0.3538.114',
1067 '71.0.3578.62',
1068 '72.0.3615.1',
1069 '72.0.3615.0',
1070 '71.0.3578.61',
1071 '70.0.3538.113',
1072 '72.0.3614.1',
1073 '72.0.3614.0',
1074 '71.0.3578.60',
1075 '70.0.3538.112',
1076 '72.0.3613.1',
1077 '72.0.3613.0',
1078 '71.0.3578.59',
1079 '70.0.3538.111',
1080 '72.0.3612.2',
1081 '72.0.3612.1',
1082 '72.0.3612.0',
1083 '70.0.3538.110',
1084 '71.0.3578.58',
1085 '70.0.3538.109',
1086 '72.0.3611.2',
1087 '72.0.3611.1',
1088 '72.0.3611.0',
1089 '71.0.3578.57',
1090 '70.0.3538.108',
1091 '72.0.3610.2',
1092 '71.0.3578.56',
1093 '71.0.3578.55',
1094 '72.0.3610.1',
1095 '72.0.3610.0',
1096 '71.0.3578.54',
1097 '70.0.3538.107',
1098 '71.0.3578.53',
1099 '72.0.3609.3',
1100 '71.0.3578.52',
1101 '72.0.3609.2',
1102 '71.0.3578.51',
1103 '72.0.3608.5',
1104 '72.0.3609.1',
1105 '72.0.3609.0',
1106 '71.0.3578.50',
1107 '70.0.3538.106',
1108 '72.0.3608.4',
1109 '72.0.3608.3',
1110 '72.0.3608.2',
1111 '71.0.3578.49',
1112 '72.0.3608.1',
1113 '72.0.3608.0',
1114 '70.0.3538.105',
1115 '71.0.3578.48',
1116 '72.0.3607.1',
1117 '72.0.3607.0',
1118 '71.0.3578.47',
1119 '70.0.3538.104',
1120 '72.0.3606.2',
1121 '72.0.3606.1',
1122 '72.0.3606.0',
1123 '71.0.3578.46',
1124 '70.0.3538.103',
1125 '70.0.3538.102',
1126 '72.0.3605.3',
1127 '72.0.3605.2',
1128 '72.0.3605.1',
1129 '72.0.3605.0',
1130 '71.0.3578.45',
1131 '70.0.3538.101',
1132 '71.0.3578.44',
1133 '71.0.3578.43',
1134 '70.0.3538.100',
1135 '70.0.3538.99',
1136 '71.0.3578.42',
1137 '72.0.3604.1',
1138 '72.0.3604.0',
1139 '71.0.3578.41',
1140 '70.0.3538.98',
1141 '71.0.3578.40',
1142 '72.0.3603.2',
1143 '72.0.3603.1',
1144 '72.0.3603.0',
1145 '71.0.3578.39',
1146 '70.0.3538.97',
1147 '72.0.3602.2',
1148 '71.0.3578.38',
1149 '71.0.3578.37',
1150 '72.0.3602.1',
1151 '72.0.3602.0',
1152 '71.0.3578.36',
1153 '70.0.3538.96',
1154 '72.0.3601.1',
1155 '72.0.3601.0',
1156 '71.0.3578.35',
1157 '70.0.3538.95',
1158 '72.0.3600.1',
1159 '72.0.3600.0',
1160 '71.0.3578.34',
1161 '70.0.3538.94',
1162 '72.0.3599.3',
1163 '72.0.3599.2',
1164 '72.0.3599.1',
1165 '72.0.3599.0',
1166 '71.0.3578.33',
1167 '70.0.3538.93',
1168 '72.0.3598.1',
1169 '72.0.3598.0',
1170 '71.0.3578.32',
1171 '70.0.3538.87',
1172 '72.0.3597.1',
1173 '72.0.3597.0',
1174 '72.0.3596.2',
1175 '71.0.3578.31',
1176 '70.0.3538.86',
1177 '71.0.3578.30',
1178 '71.0.3578.29',
1179 '72.0.3596.1',
1180 '72.0.3596.0',
1181 '71.0.3578.28',
1182 '70.0.3538.85',
1183 '72.0.3595.2',
1184 '72.0.3591.3',
1185 '72.0.3595.1',
1186 '72.0.3595.0',
1187 '71.0.3578.27',
1188 '70.0.3538.84',
1189 '72.0.3594.1',
1190 '72.0.3594.0',
1191 '71.0.3578.26',
1192 '70.0.3538.83',
1193 '72.0.3593.2',
1194 '72.0.3593.1',
1195 '72.0.3593.0',
1196 '71.0.3578.25',
1197 '70.0.3538.82',
1198 '72.0.3589.3',
1199 '72.0.3592.2',
1200 '72.0.3592.1',
1201 '72.0.3592.0',
1202 '71.0.3578.24',
1203 '72.0.3589.2',
1204 '70.0.3538.81',
1205 '70.0.3538.80',
1206 '72.0.3591.2',
1207 '72.0.3591.1',
1208 '72.0.3591.0',
1209 '71.0.3578.23',
1210 '70.0.3538.79',
1211 '71.0.3578.22',
1212 '72.0.3590.1',
1213 '72.0.3590.0',
1214 '71.0.3578.21',
1215 '70.0.3538.78',
1216 '70.0.3538.77',
1217 '72.0.3589.1',
1218 '72.0.3589.0',
1219 '71.0.3578.20',
1220 '70.0.3538.76',
1221 '71.0.3578.19',
1222 '70.0.3538.75',
1223 '72.0.3588.1',
1224 '72.0.3588.0',
1225 '71.0.3578.18',
1226 '70.0.3538.74',
1227 '72.0.3586.2',
1228 '72.0.3587.0',
1229 '71.0.3578.17',
1230 '70.0.3538.73',
1231 '72.0.3586.1',
1232 '72.0.3586.0',
1233 '71.0.3578.16',
1234 '70.0.3538.72',
1235 '72.0.3585.1',
1236 '72.0.3585.0',
1237 '71.0.3578.15',
1238 '70.0.3538.71',
1239 '71.0.3578.14',
1240 '72.0.3584.1',
1241 '72.0.3584.0',
1242 '71.0.3578.13',
1243 '70.0.3538.70',
1244 '72.0.3583.2',
1245 '71.0.3578.12',
1246 '72.0.3583.1',
1247 '72.0.3583.0',
1248 '71.0.3578.11',
1249 '70.0.3538.69',
1250 '71.0.3578.10',
1251 '72.0.3582.0',
1252 '72.0.3581.4',
1253 '71.0.3578.9',
1254 '70.0.3538.67',
1255 '72.0.3581.3',
1256 '72.0.3581.2',
1257 '72.0.3581.1',
1258 '72.0.3581.0',
1259 '71.0.3578.8',
1260 '70.0.3538.66',
1261 '72.0.3580.1',
1262 '72.0.3580.0',
1263 '71.0.3578.7',
1264 '70.0.3538.65',
1265 '71.0.3578.6',
1266 '72.0.3579.1',
1267 '72.0.3579.0',
1268 '71.0.3578.5',
1269 '70.0.3538.64',
1270 '71.0.3578.4',
1271 '71.0.3578.3',
1272 '71.0.3578.2',
1273 '71.0.3578.1',
1274 '71.0.3578.0',
1275 '70.0.3538.63',
1276 '69.0.3497.128',
1277 '70.0.3538.62',
1278 '70.0.3538.61',
1279 '70.0.3538.60',
1280 '70.0.3538.59',
1281 '71.0.3577.1',
1282 '71.0.3577.0',
1283 '70.0.3538.58',
1284 '69.0.3497.127',
1285 '71.0.3576.2',
1286 '71.0.3576.1',
1287 '71.0.3576.0',
1288 '70.0.3538.57',
1289 '70.0.3538.56',
1290 '71.0.3575.2',
1291 '70.0.3538.55',
1292 '69.0.3497.126',
1293 '70.0.3538.54',
1294 '71.0.3575.1',
1295 '71.0.3575.0',
1296 '71.0.3574.1',
1297 '71.0.3574.0',
1298 '70.0.3538.53',
1299 '69.0.3497.125',
1300 '70.0.3538.52',
1301 '71.0.3573.1',
1302 '71.0.3573.0',
1303 '70.0.3538.51',
1304 '69.0.3497.124',
1305 '71.0.3572.1',
1306 '71.0.3572.0',
1307 '70.0.3538.50',
1308 '69.0.3497.123',
1309 '71.0.3571.2',
1310 '70.0.3538.49',
1311 '69.0.3497.122',
1312 '71.0.3571.1',
1313 '71.0.3571.0',
1314 '70.0.3538.48',
1315 '69.0.3497.121',
1316 '71.0.3570.1',
1317 '71.0.3570.0',
1318 '70.0.3538.47',
1319 '69.0.3497.120',
1320 '71.0.3568.2',
1321 '71.0.3569.1',
1322 '71.0.3569.0',
1323 '70.0.3538.46',
1324 '69.0.3497.119',
1325 '70.0.3538.45',
1326 '71.0.3568.1',
1327 '71.0.3568.0',
1328 '70.0.3538.44',
1329 '69.0.3497.118',
1330 '70.0.3538.43',
1331 '70.0.3538.42',
1332 '71.0.3567.1',
1333 '71.0.3567.0',
1334 '70.0.3538.41',
1335 '69.0.3497.117',
1336 '71.0.3566.1',
1337 '71.0.3566.0',
1338 '70.0.3538.40',
1339 '69.0.3497.116',
1340 '71.0.3565.1',
1341 '71.0.3565.0',
1342 '70.0.3538.39',
1343 '69.0.3497.115',
1344 '71.0.3564.1',
1345 '71.0.3564.0',
1346 '70.0.3538.38',
1347 '69.0.3497.114',
1348 '71.0.3563.0',
1349 '71.0.3562.2',
1350 '70.0.3538.37',
1351 '69.0.3497.113',
1352 '70.0.3538.36',
1353 '70.0.3538.35',
1354 '71.0.3562.1',
1355 '71.0.3562.0',
1356 '70.0.3538.34',
1357 '69.0.3497.112',
1358 '70.0.3538.33',
1359 '71.0.3561.1',
1360 '71.0.3561.0',
1361 '70.0.3538.32',
1362 '69.0.3497.111',
1363 '71.0.3559.6',
1364 '71.0.3560.1',
1365 '71.0.3560.0',
1366 '71.0.3559.5',
1367 '71.0.3559.4',
1368 '70.0.3538.31',
1369 '69.0.3497.110',
1370 '71.0.3559.3',
1371 '70.0.3538.30',
1372 '69.0.3497.109',
1373 '71.0.3559.2',
1374 '71.0.3559.1',
1375 '71.0.3559.0',
1376 '70.0.3538.29',
1377 '69.0.3497.108',
1378 '71.0.3558.2',
1379 '71.0.3558.1',
1380 '71.0.3558.0',
1381 '70.0.3538.28',
1382 '69.0.3497.107',
1383 '71.0.3557.2',
1384 '71.0.3557.1',
1385 '71.0.3557.0',
1386 '70.0.3538.27',
1387 '69.0.3497.106',
1388 '71.0.3554.4',
1389 '70.0.3538.26',
1390 '71.0.3556.1',
1391 '71.0.3556.0',
1392 '70.0.3538.25',
1393 '71.0.3554.3',
1394 '69.0.3497.105',
1395 '71.0.3554.2',
1396 '70.0.3538.24',
1397 '69.0.3497.104',
1398 '71.0.3555.2',
1399 '70.0.3538.23',
1400 '71.0.3555.1',
1401 '71.0.3555.0',
1402 '70.0.3538.22',
1403 '69.0.3497.103',
1404 '71.0.3554.1',
1405 '71.0.3554.0',
1406 '70.0.3538.21',
1407 '69.0.3497.102',
1408 '71.0.3553.3',
1409 '70.0.3538.20',
1410 '69.0.3497.101',
1411 '71.0.3553.2',
1412 '69.0.3497.100',
1413 '71.0.3553.1',
1414 '71.0.3553.0',
1415 '70.0.3538.19',
1416 '69.0.3497.99',
1417 '69.0.3497.98',
1418 '69.0.3497.97',
1419 '71.0.3552.6',
1420 '71.0.3552.5',
1421 '71.0.3552.4',
1422 '71.0.3552.3',
1423 '71.0.3552.2',
1424 '71.0.3552.1',
1425 '71.0.3552.0',
1426 '70.0.3538.18',
1427 '69.0.3497.96',
1428 '71.0.3551.3',
1429 '71.0.3551.2',
1430 '71.0.3551.1',
1431 '71.0.3551.0',
1432 '70.0.3538.17',
1433 '69.0.3497.95',
1434 '71.0.3550.3',
1435 '71.0.3550.2',
1436 '71.0.3550.1',
1437 '71.0.3550.0',
1438 '70.0.3538.16',
1439 '69.0.3497.94',
1440 '71.0.3549.1',
1441 '71.0.3549.0',
1442 '70.0.3538.15',
1443 '69.0.3497.93',
1444 '69.0.3497.92',
1445 '71.0.3548.1',
1446 '71.0.3548.0',
1447 '70.0.3538.14',
1448 '69.0.3497.91',
1449 '71.0.3547.1',
1450 '71.0.3547.0',
1451 '70.0.3538.13',
1452 '69.0.3497.90',
1453 '71.0.3546.2',
1454 '69.0.3497.89',
1455 '71.0.3546.1',
1456 '71.0.3546.0',
1457 '70.0.3538.12',
1458 '69.0.3497.88',
1459 '71.0.3545.4',
1460 '71.0.3545.3',
1461 '71.0.3545.2',
1462 '71.0.3545.1',
1463 '71.0.3545.0',
1464 '70.0.3538.11',
1465 '69.0.3497.87',
1466 '71.0.3544.5',
1467 '71.0.3544.4',
1468 '71.0.3544.3',
1469 '71.0.3544.2',
1470 '71.0.3544.1',
1471 '71.0.3544.0',
1472 '69.0.3497.86',
1473 '70.0.3538.10',
1474 '69.0.3497.85',
1475 '70.0.3538.9',
1476 '69.0.3497.84',
1477 '71.0.3543.4',
1478 '70.0.3538.8',
1479 '71.0.3543.3',
1480 '71.0.3543.2',
1481 '71.0.3543.1',
1482 '71.0.3543.0',
1483 '70.0.3538.7',
1484 '69.0.3497.83',
1485 '71.0.3542.2',
1486 '71.0.3542.1',
1487 '71.0.3542.0',
1488 '70.0.3538.6',
1489 '69.0.3497.82',
1490 '69.0.3497.81',
1491 '71.0.3541.1',
1492 '71.0.3541.0',
1493 '70.0.3538.5',
1494 '69.0.3497.80',
1495 '71.0.3540.1',
1496 '71.0.3540.0',
1497 '70.0.3538.4',
1498 '69.0.3497.79',
1499 '70.0.3538.3',
1500 '71.0.3539.1',
1501 '71.0.3539.0',
1502 '69.0.3497.78',
1503 '68.0.3440.134',
1504 '69.0.3497.77',
1505 '70.0.3538.2',
1506 '70.0.3538.1',
1507 '70.0.3538.0',
1508 '69.0.3497.76',
1509 '68.0.3440.133',
1510 '69.0.3497.75',
1511 '70.0.3537.2',
1512 '70.0.3537.1',
1513 '70.0.3537.0',
1514 '69.0.3497.74',
1515 '68.0.3440.132',
1516 '70.0.3536.0',
1517 '70.0.3535.5',
1518 '70.0.3535.4',
1519 '70.0.3535.3',
1520 '69.0.3497.73',
1521 '68.0.3440.131',
1522 '70.0.3532.8',
1523 '70.0.3532.7',
1524 '69.0.3497.72',
1525 '69.0.3497.71',
1526 '70.0.3535.2',
1527 '70.0.3535.1',
1528 '70.0.3535.0',
1529 '69.0.3497.70',
1530 '68.0.3440.130',
1531 '69.0.3497.69',
1532 '68.0.3440.129',
1533 '70.0.3534.4',
1534 '70.0.3534.3',
1535 '70.0.3534.2',
1536 '70.0.3534.1',
1537 '70.0.3534.0',
1538 '69.0.3497.68',
1539 '68.0.3440.128',
1540 '70.0.3533.2',
1541 '70.0.3533.1',
1542 '70.0.3533.0',
1543 '69.0.3497.67',
1544 '68.0.3440.127',
1545 '70.0.3532.6',
1546 '70.0.3532.5',
1547 '70.0.3532.4',
1548 '69.0.3497.66',
1549 '68.0.3440.126',
1550 '70.0.3532.3',
1551 '70.0.3532.2',
1552 '70.0.3532.1',
1553 '69.0.3497.60',
1554 '69.0.3497.65',
1555 '69.0.3497.64',
1556 '70.0.3532.0',
1557 '70.0.3531.0',
1558 '70.0.3530.4',
1559 '70.0.3530.3',
1560 '70.0.3530.2',
1561 '69.0.3497.58',
1562 '68.0.3440.125',
1563 '69.0.3497.57',
1564 '69.0.3497.56',
1565 '69.0.3497.55',
1566 '69.0.3497.54',
1567 '70.0.3530.1',
1568 '70.0.3530.0',
1569 '69.0.3497.53',
1570 '68.0.3440.124',
1571 '69.0.3497.52',
1572 '70.0.3529.3',
1573 '70.0.3529.2',
1574 '70.0.3529.1',
1575 '70.0.3529.0',
1576 '69.0.3497.51',
1577 '70.0.3528.4',
1578 '68.0.3440.123',
1579 '70.0.3528.3',
1580 '70.0.3528.2',
1581 '70.0.3528.1',
1582 '70.0.3528.0',
1583 '69.0.3497.50',
1584 '68.0.3440.122',
1585 '70.0.3527.1',
1586 '70.0.3527.0',
1587 '69.0.3497.49',
1588 '68.0.3440.121',
1589 '70.0.3526.1',
1590 '70.0.3526.0',
1591 '68.0.3440.120',
1592 '69.0.3497.48',
1593 '69.0.3497.47',
1594 '68.0.3440.119',
1595 '68.0.3440.118',
1596 '70.0.3525.5',
1597 '70.0.3525.4',
1598 '70.0.3525.3',
1599 '68.0.3440.117',
1600 '69.0.3497.46',
1601 '70.0.3525.2',
1602 '70.0.3525.1',
1603 '70.0.3525.0',
1604 '69.0.3497.45',
1605 '68.0.3440.116',
1606 '70.0.3524.4',
1607 '70.0.3524.3',
1608 '69.0.3497.44',
1609 '70.0.3524.2',
1610 '70.0.3524.1',
1611 '70.0.3524.0',
1612 '70.0.3523.2',
1613 '69.0.3497.43',
1614 '68.0.3440.115',
1615 '70.0.3505.9',
1616 '69.0.3497.42',
1617 '70.0.3505.8',
1618 '70.0.3523.1',
1619 '70.0.3523.0',
1620 '69.0.3497.41',
1621 '68.0.3440.114',
1622 '70.0.3505.7',
1623 '69.0.3497.40',
1624 '70.0.3522.1',
1625 '70.0.3522.0',
1626 '70.0.3521.2',
1627 '69.0.3497.39',
1628 '68.0.3440.113',
1629 '70.0.3505.6',
1630 '70.0.3521.1',
1631 '70.0.3521.0',
1632 '69.0.3497.38',
1633 '68.0.3440.112',
1634 '70.0.3520.1',
1635 '70.0.3520.0',
1636 '69.0.3497.37',
1637 '68.0.3440.111',
1638 '70.0.3519.3',
1639 '70.0.3519.2',
1640 '70.0.3519.1',
1641 '70.0.3519.0',
1642 '69.0.3497.36',
1643 '68.0.3440.110',
1644 '70.0.3518.1',
1645 '70.0.3518.0',
1646 '69.0.3497.35',
1647 '69.0.3497.34',
1648 '68.0.3440.109',
1649 '70.0.3517.1',
1650 '70.0.3517.0',
1651 '69.0.3497.33',
1652 '68.0.3440.108',
1653 '69.0.3497.32',
1654 '70.0.3516.3',
1655 '70.0.3516.2',
1656 '70.0.3516.1',
1657 '70.0.3516.0',
1658 '69.0.3497.31',
1659 '68.0.3440.107',
1660 '70.0.3515.4',
1661 '68.0.3440.106',
1662 '70.0.3515.3',
1663 '70.0.3515.2',
1664 '70.0.3515.1',
1665 '70.0.3515.0',
1666 '69.0.3497.30',
1667 '68.0.3440.105',
1668 '68.0.3440.104',
1669 '70.0.3514.2',
1670 '70.0.3514.1',
1671 '70.0.3514.0',
1672 '69.0.3497.29',
1673 '68.0.3440.103',
1674 '70.0.3513.1',
1675 '70.0.3513.0',
1676 '69.0.3497.28',
    )
    return _USER_AGENT_TPL % random.choice(_CHROME_VERSIONS)


std_headers = {
    'User-Agent': random_user_agent(),
    'Accept': 'text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8',
    'Accept-Encoding': 'gzip, deflate',
    'Accept-Language': 'en-us,en;q=0.5',
}


USER_AGENTS = {
    'Safari': 'Mozilla/5.0 (X11; Linux x86_64; rv:10.0) AppleWebKit/533.20.25 (KHTML, like Gecko) Version/5.0.4 Safari/533.20.27',
}

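
# Illustrative sketch (not part of the original module): std_headers provides the
# default request headers, with a freshly randomized Chrome User-Agent; callers
# typically copy it and add per-request fields. The referer value is a placeholder.
def _example_default_headers(referer='https://example.com/'):
    headers = dict(std_headers)
    headers['Referer'] = referer
    return headers
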

NO_DEFAULT = object()

ENGLISH_MONTH_NAMES = [
    'January', 'February', 'March', 'April', 'May', 'June',
    'July', 'August', 'September', 'October', 'November', 'December']

MONTH_NAMES = {
    'en': ENGLISH_MONTH_NAMES,
    'fr': [
        'janvier', 'février', 'mars', 'avril', 'mai', 'juin',
        'juillet', 'août', 'septembre', 'octobre', 'novembre', 'décembre'],
}

KNOWN_EXTENSIONS = (
    'mp4', 'm4a', 'm4p', 'm4b', 'm4r', 'm4v', 'aac',
    'flv', 'f4v', 'f4a', 'f4b',
    'webm', 'ogg', 'ogv', 'oga', 'ogx', 'spx', 'opus',
    'mkv', 'mka', 'mk3d',
    'avi', 'divx',
    'mov',
    'asf', 'wmv', 'wma',
    '3gp', '3g2',
    'mp3',
    'flac',
    'ape',
    'wav',
    'f4f', 'f4m', 'm3u8', 'smil')

# needed for sanitizing filenames in restricted mode
ACCENT_CHARS = dict(zip('ÂÃÄÀÁÅÆÇÈÉÊËÌÍÎÏÐÑÒÓÔÕÖŐØŒÙÚÛÜŰÝÞßàáâãäåæçèéêëìíîïðñòóôõöőøœùúûüűýþÿ',
                        itertools.chain('AAAAAA', ['AE'], 'CEEEEIIIIDNOOOOOOO', ['OE'], 'UUUUUY', ['TH', 'ss'],
                                        'aaaaaa', ['ae'], 'ceeeeiiiionooooooo', ['oe'], 'uuuuuy', ['th'], 'y')))

DATE_FORMATS = (
    '%d %B %Y',
    '%d %b %Y',
    '%B %d %Y',
    '%B %dst %Y',
    '%B %dnd %Y',
    '%B %drd %Y',
    '%B %dth %Y',
    '%b %d %Y',
    '%b %dst %Y',
    '%b %dnd %Y',
    '%b %drd %Y',
    '%b %dth %Y',
    '%b %dst %Y %I:%M',
    '%b %dnd %Y %I:%M',
    '%b %drd %Y %I:%M',
    '%b %dth %Y %I:%M',
    '%Y %m %d',
    '%Y-%m-%d',
    '%Y.%m.%d.',
    '%Y/%m/%d',
    '%Y/%m/%d %H:%M',
    '%Y/%m/%d %H:%M:%S',
    '%Y%m%d%H%M',
    '%Y%m%d%H%M%S',
    '%Y%m%d',
    '%Y-%m-%d %H:%M',
    '%Y-%m-%d %H:%M:%S',
    '%Y-%m-%d %H:%M:%S.%f',
    '%Y-%m-%d %H:%M:%S:%f',
    '%d.%m.%Y %H:%M',
    '%d.%m.%Y %H.%M',
    '%Y-%m-%dT%H:%M:%SZ',
    '%Y-%m-%dT%H:%M:%S.%fZ',
    '%Y-%m-%dT%H:%M:%S.%f0Z',
    '%Y-%m-%dT%H:%M:%S',
    '%Y-%m-%dT%H:%M:%S.%f',
    '%Y-%m-%dT%H:%M',
    '%b %d %Y at %H:%M',
    '%b %d %Y at %H:%M:%S',
    '%B %d %Y at %H:%M',
    '%B %d %Y at %H:%M:%S',
    '%H:%M %d-%b-%Y',
)

DATE_FORMATS_DAY_FIRST = list(DATE_FORMATS)
DATE_FORMATS_DAY_FIRST.extend([
    '%d-%m-%Y',
    '%d.%m.%Y',
    '%d.%m.%y',
    '%d/%m/%Y',
    '%d/%m/%y',
    '%d/%m/%Y %H:%M:%S',
])

DATE_FORMATS_MONTH_FIRST = list(DATE_FORMATS)
DATE_FORMATS_MONTH_FIRST.extend([
    '%m-%d-%Y',
    '%m.%d.%Y',
    '%m/%d/%Y',
    '%m/%d/%y',
    '%m/%d/%Y %H:%M:%S',
])

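
# Illustrative sketch (not part of the original module): the format tables above
# are meant to be tried in order against a date string; the first one that
# datetime.strptime accepts wins. The sample input is an assumption.
def _example_parse_date(date_str='2014/02/05 19:30:00', day_first=True):
    formats = DATE_FORMATS_DAY_FIRST if day_first else DATE_FORMATS_MONTH_FIRST
    for fmt in formats:
        try:
            return datetime.datetime.strptime(date_str, fmt)
        except ValueError:
            continue
    return None
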
PACKED_CODES_RE = r"}\('(.+)',(\d+),(\d+),'([^']+)'\.split\('\|'\)"
JSON_LD_RE = r'(?is)<script[^>]+type=(["\']?)application/ld\+json\1[^>]*>(?P<json_ld>.+?)</script>'


def preferredencoding():
    """Get preferred encoding.

    Returns the best encoding scheme for the system, based on
    locale.getpreferredencoding() and some further tweaks.
    """
    try:
        pref = locale.getpreferredencoding()
        'TEST'.encode(pref)
    except Exception:
        pref = 'UTF-8'

    return pref

def write_json_file(obj, fn):
    """ Encode obj as JSON and write it to fn, atomically if possible """

    fn = encodeFilename(fn)
    if sys.version_info < (3, 0) and sys.platform != 'win32':
        encoding = get_filesystem_encoding()
        # os.path.basename returns a bytes object, but NamedTemporaryFile
        # will fail if the filename contains non ascii characters unless we
        # use a unicode object
        path_basename = lambda f: os.path.basename(fn).decode(encoding)
        # the same for os.path.dirname
        path_dirname = lambda f: os.path.dirname(fn).decode(encoding)
    else:
        path_basename = os.path.basename
        path_dirname = os.path.dirname

    args = {
        'suffix': '.tmp',
        'prefix': path_basename(fn) + '.',
        'dir': path_dirname(fn),
        'delete': False,
    }

    # In Python 2.x, json.dump expects a bytestream.
    # In Python 3.x, it writes to a character stream
    if sys.version_info < (3, 0):
        args['mode'] = 'wb'
    else:
        args.update({
            'mode': 'w',
            'encoding': 'utf-8',
        })

    tf = tempfile.NamedTemporaryFile(**compat_kwargs(args))

    try:
        with tf:
            json.dump(obj, tf, ensure_ascii=False)
        if sys.platform == 'win32':
            # Need to remove existing file on Windows, else os.rename raises
            # WindowsError or FileExistsError.
            try:
                os.unlink(fn)
            except OSError:
                pass
        try:
            mask = os.umask(0)
            os.umask(mask)
            os.chmod(tf.name, 0o666 & ~mask)
        except OSError:
            pass
        os.rename(tf.name, fn)
    except Exception:
        try:
            os.remove(tf.name)
        except OSError:
            pass
        raise

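
# Illustrative sketch (not part of the original module): write_json_file is meant
# for state that must not be corrupted by a crash mid-write; it writes to a temp
# file and renames it over the target. The path below is a placeholder.
def _example_write_json_file(path='/tmp/example-state.json'):
    state = {'downloaded': ['id1', 'id2'], 'version': 1}
    write_json_file(state, path)
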
if sys.version_info >= (2, 7):
    def find_xpath_attr(node, xpath, key, val=None):
        """ Find the xpath xpath[@key=val] """
        assert re.match(r'^[a-zA-Z_-]+$', key)
        expr = xpath + ('[@%s]' % key if val is None else "[@%s='%s']" % (key, val))
        return node.find(expr)
else:
    def find_xpath_attr(node, xpath, key, val=None):
        for f in node.findall(compat_xpath(xpath)):
            if key not in f.attrib:
                continue
            if val is None or f.attrib.get(key) == val:
                return f
        return None

# On python2.6 the xml.etree.ElementTree.Element methods don't support
# the namespace parameter


def xpath_with_ns(path, ns_map):
    components = [c.split(':') for c in path.split('/')]
    replaced = []
    for c in components:
        if len(c) == 1:
            replaced.append(c[0])
        else:
            ns, tag = c
            replaced.append('{%s}%s' % (ns_map[ns], tag))
    return '/'.join(replaced)

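
# Illustrative sketch (not part of the original module): xpath_with_ns expands
# 'prefix:tag' steps into ElementTree's '{uri}tag' form. The namespace mapping
# below is an assumption for demonstration.
def _example_xpath_with_ns():
    ns_map = {'media': 'http://search.yahoo.com/mrss/'}
    return xpath_with_ns('./media:group/media:title', ns_map)
    # -> './{http://search.yahoo.com/mrss/}group/{http://search.yahoo.com/mrss/}title'
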

def xpath_element(node, xpath, name=None, fatal=False, default=NO_DEFAULT):
    def _find_xpath(xpath):
        return node.find(compat_xpath(xpath))

    if isinstance(xpath, (str, compat_str)):
        n = _find_xpath(xpath)
    else:
        for xp in xpath:
            n = _find_xpath(xp)
            if n is not None:
                break

    if n is None:
        if default is not NO_DEFAULT:
            return default
        elif fatal:
            name = xpath if name is None else name
            raise ExtractorError('Could not find XML element %s' % name)
        else:
            return None
    return n


def xpath_text(node, xpath, name=None, fatal=False, default=NO_DEFAULT):
    n = xpath_element(node, xpath, name, fatal=fatal, default=default)
    if n is None or n == default:
        return n
    if n.text is None:
        if default is not NO_DEFAULT:
            return default
        elif fatal:
            name = xpath if name is None else name
            raise ExtractorError('Could not find XML element\'s text %s' % name)
        else:
            return None
    return n.text


def xpath_attr(node, xpath, key, name=None, fatal=False, default=NO_DEFAULT):
    n = find_xpath_attr(node, xpath, key)
    if n is None:
        if default is not NO_DEFAULT:
            return default
        elif fatal:
            name = '%s[@%s]' % (xpath, key) if name is None else name
            raise ExtractorError('Could not find XML attribute %s' % name)
        else:
            return None
    return n.attrib[key]

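
# Illustrative sketch (not part of the original module): the NO_DEFAULT sentinel
# lets these helpers distinguish "no default supplied" (raise or return None,
# depending on fatal) from an explicit default of None. The sample XML is assumed.
def _example_xpath_helpers():
    doc = compat_etree_fromstring('<video><title>Clip</title></video>')
    title = xpath_text(doc, './title', 'title', fatal=True)   # -> 'Clip'
    duration = xpath_text(doc, './duration', default=None)    # missing -> None
    return title, duration
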
def get_element_by_id(id, html):
    """Return the content of the tag with the specified ID in the passed HTML document"""
    return get_element_by_attribute('id', id, html)


def get_element_html_by_id(id, html):
    """Return the html of the tag with the specified ID in the passed HTML document"""
    return get_element_html_by_attribute('id', id, html)


def get_element_by_class(class_name, html):
    """Return the content of the first tag with the specified class in the passed HTML document"""
    retval = get_elements_by_class(class_name, html)
    return retval[0] if retval else None


def get_element_html_by_class(class_name, html):
    """Return the html of the first tag with the specified class in the passed HTML document"""
    retval = get_elements_html_by_class(class_name, html)
    return retval[0] if retval else None


def get_element_by_attribute(attribute, value, html, escape_value=True):
    retval = get_elements_by_attribute(attribute, value, html, escape_value)
    return retval[0] if retval else None


def get_element_html_by_attribute(attribute, value, html, escape_value=True):
    retval = get_elements_html_by_attribute(attribute, value, html, escape_value)
    return retval[0] if retval else None


def get_elements_by_class(class_name, html):
    """Return the content of all tags with the specified class in the passed HTML document as a list"""
    return get_elements_by_attribute(
        'class', r'[^\'"]*\b%s\b[^\'"]*' % re.escape(class_name),
        html, escape_value=False)


def get_elements_html_by_class(class_name, html):
    """Return the html of all tags with the specified class in the passed HTML document as a list"""
    return get_elements_html_by_attribute(
        'class', r'[^\'"]*\b%s\b[^\'"]*' % re.escape(class_name),
        html, escape_value=False)


def get_elements_by_attribute(*args, **kwargs):
    """Return the content of the tag with the specified attribute in the passed HTML document"""
    return [content for content, _ in get_elements_text_and_html_by_attribute(*args, **kwargs)]


def get_elements_html_by_attribute(*args, **kwargs):
    """Return the html of the tag with the specified attribute in the passed HTML document"""
    return [whole for _, whole in get_elements_text_and_html_by_attribute(*args, **kwargs)]


def get_elements_text_and_html_by_attribute(attribute, value, html, escape_value=True):
    """
    Return the text (content) and the html (whole) of the tag with the specified
    attribute in the passed HTML document
    """

    value_quote_optional = '' if re.match(r'''[\s"'`=<>]''', value) else '?'

    value = re.escape(value) if escape_value else value

    partial_element_re = r'''(?x)
        <(?P<tag>[a-zA-Z0-9:._-]+)
         (?:\s(?:[^>"']|"[^"]*"|'[^']*')*)?
         \s%(attribute)s\s*=\s*(?P<_q>['"]%(vqo)s)(?-x:%(value)s)(?P=_q)
        ''' % {'attribute': re.escape(attribute), 'value': value, 'vqo': value_quote_optional}

    for m in re.finditer(partial_element_re, html):
        content, whole = get_element_text_and_html_by_tag(m.group('tag'), html[m.start():])

        yield (
            unescapeHTML(re.sub(r'^(?P<q>["\'])(?P<content>.*)(?P=q)$', r'\g<content>', content, flags=re.DOTALL)),
            whole
        )

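
# Illustrative sketch (not part of the original module): typical use of the
# class/attribute helpers on a scraped page. The HTML snippet is an assumption.
def _example_get_element_helpers():
    page = '<div class="title main">Hello &amp; welcome</div>'
    text = get_element_by_class('title', page)        # -> 'Hello & welcome' (entities unescaped)
    whole = get_element_html_by_class('title', page)  # -> the full <div>...</div> markup
    return text, whole
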

class HTMLBreakOnClosingTagParser(compat_HTMLParser):
    """
    HTML parser which raises HTMLBreakOnClosingTagException upon reaching the
    closing tag for the first opening tag it has encountered, and can be used
    as a context manager
    """

    class HTMLBreakOnClosingTagException(Exception):
        pass

    def __init__(self):
        self.tagstack = collections.deque()
        compat_HTMLParser.__init__(self)

    def __enter__(self):
        return self

    def __exit__(self, *_):
        self.close()

    def close(self):
        # handle_endtag does not return upon raising HTMLBreakOnClosingTagException,
        # so data remains buffered; we no longer have any interest in it, thus
        # override this method to discard it
        pass

    def handle_starttag(self, tag, _):
        self.tagstack.append(tag)

    def handle_endtag(self, tag):
        if not self.tagstack:
            raise compat_HTMLParseError('no tags in the stack')
        while self.tagstack:
            inner_tag = self.tagstack.pop()
            if inner_tag == tag:
                break
        else:
            raise compat_HTMLParseError(f'matching opening tag for closing {tag} tag not found')
        if not self.tagstack:
            raise self.HTMLBreakOnClosingTagException()


def get_element_text_and_html_by_tag(tag, html):
    """
    For the first element with the specified tag in the passed HTML document
    return its content (text) and the whole element (html)
    """
    def find_or_raise(haystack, needle, exc):
        try:
            return haystack.index(needle)
        except ValueError:
            raise exc
    closing_tag = f'</{tag}>'
    whole_start = find_or_raise(
        html, f'<{tag}', compat_HTMLParseError(f'opening {tag} tag not found'))
    content_start = find_or_raise(
        html[whole_start:], '>', compat_HTMLParseError(f'malformed opening {tag} tag'))
    content_start += whole_start + 1
    with HTMLBreakOnClosingTagParser() as parser:
        parser.feed(html[whole_start:content_start])
        if not parser.tagstack or parser.tagstack[0] != tag:
            raise compat_HTMLParseError(f'parser did not match opening {tag} tag')
        offset = content_start
        while offset < len(html):
            next_closing_tag_start = find_or_raise(
                html[offset:], closing_tag,
                compat_HTMLParseError(f'closing {tag} tag not found'))
            next_closing_tag_end = next_closing_tag_start + len(closing_tag)
            try:
                parser.feed(html[offset:offset + next_closing_tag_end])
                offset += next_closing_tag_end
            except HTMLBreakOnClosingTagParser.HTMLBreakOnClosingTagException:
                return html[content_start:offset + next_closing_tag_start], \
                    html[whole_start:offset + next_closing_tag_end]
        raise compat_HTMLParseError('unexpected end of html')

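
# Illustrative sketch (not part of the original module): unlike a plain regex,
# get_element_text_and_html_by_tag keeps track of nested tags of the same name,
# so the inner </div> below does not end the match. The snippet is an assumption.
def _example_text_and_html_by_tag():
    page = '<div id="a">outer <div>inner</div> text</div><div id="b">other</div>'
    text, whole = get_element_text_and_html_by_tag('div', page)
    # text  -> 'outer <div>inner</div> text'
    # whole -> '<div id="a">outer <div>inner</div> text</div>'
    return text, whole
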

class HTMLAttributeParser(compat_HTMLParser):
    """Trivial HTML parser to gather the attributes for a single element"""

    def __init__(self):
        self.attrs = {}
        compat_HTMLParser.__init__(self)

    def handle_starttag(self, tag, attrs):
        self.attrs = dict(attrs)


class HTMLListAttrsParser(compat_HTMLParser):
    """HTML parser to gather the attributes for the elements of a list"""

    def __init__(self):
        compat_HTMLParser.__init__(self)
        self.items = []
        self._level = 0

    def handle_starttag(self, tag, attrs):
        if tag == 'li' and self._level == 0:
            self.items.append(dict(attrs))
        self._level += 1

    def handle_endtag(self, tag):
        self._level -= 1


def extract_attributes(html_element):
    """Given a string for an HTML element such as
    <el
         a="foo" B="bar" c="&98;az" d=boz
         empty= noval entity="&amp;"
         sq='"' dq="'"
    >
    Decode and return a dictionary of attributes.
    {
        'a': 'foo', 'b': 'bar', c: 'baz', d: 'boz',
        'empty': '', 'noval': None, 'entity': '&',
        'sq': '"', 'dq': '\''
    }.
    NB HTMLParser is stricter in Python 2.6 & 3.2 than in later versions,
    but the cases in the unit test will work for all of 2.6, 2.7, 3.2-3.5.
    """
    parser = HTMLAttributeParser()
    try:
        parser.feed(html_element)
        parser.close()
    # Older Python may throw HTMLParseError in case of malformed HTML
    except compat_HTMLParseError:
        pass
    return parser.attrs


def parse_list(webpage):
    """Given a string for a series of HTML <li> elements,
    return a list of dictionaries of their attributes"""
    parser = HTMLListAttrsParser()
    parser.feed(webpage)
    parser.close()
    return parser.items

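
# Illustrative sketch (not part of the original module): extract_attributes on a
# typical embed tag; attribute names are lowercased and valueless attributes map
# to None. The element below is an assumption for demonstration.
def _example_extract_attributes():
    el = '<video src="https://example.com/v.mp4" data-id=42 controls>'
    return extract_attributes(el)
    # -> {'src': 'https://example.com/v.mp4', 'data-id': '42', 'controls': None}
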

def clean_html(html):
    """Clean an HTML snippet into a readable string"""

    if html is None:  # Convenience for sanitizing descriptions etc.
        return html

    # Newline vs <br />
    html = html.replace('\n', ' ')
    html = re.sub(r'(?u)\s*<\s*br\s*/?\s*>\s*', '\n', html)
    html = re.sub(r'(?u)<\s*/\s*p\s*>\s*<\s*p[^>]*>', '\n', html)
    # Strip html tags
    html = re.sub('<.*?>', '', html)
    # Replace html entities
    html = unescapeHTML(html)
    return html.strip()


def sanitize_open(filename, open_mode):
    """Try to open the given filename, and slightly tweak it if this fails.

    Attempts to open the given filename. If this fails, it tries to change
    the filename slightly, step by step, until it's either able to open it
    or it fails and raises a final exception, like the standard open()
    function.

    It returns the tuple (stream, definitive_file_name).
    """
    try:
        if filename == '-':
            if sys.platform == 'win32':
                import msvcrt
                msvcrt.setmode(sys.stdout.fileno(), os.O_BINARY)
            return (sys.stdout.buffer if hasattr(sys.stdout, 'buffer') else sys.stdout, filename)
        stream = open(encodeFilename(filename), open_mode)
        return (stream, filename)
    except (IOError, OSError) as err:
        if err.errno in (errno.EACCES,):
            raise

        # In case of error, try to remove win32 forbidden chars
        alt_filename = sanitize_path(filename)
        if alt_filename == filename:
            raise
        else:
            # An exception here should be caught in the caller
            stream = open(encodeFilename(alt_filename), open_mode)
            return (stream, alt_filename)

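
# Illustrative sketch (not part of the original module): clean_html turns markup
# like the description snippet below into plain text with sensible line breaks.
def _example_clean_html():
    snippet = '<p>First line<br>Second line</p><p>New &amp; improved</p>'
    return clean_html(snippet)
    # -> 'First line\nSecond line\nNew & improved'
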

def timeconvert(timestr):
    """Convert RFC 2822 defined time string into system timestamp"""
    timestamp = None
    timetuple = email.utils.parsedate_tz(timestr)
    if timetuple is not None:
        timestamp = email.utils.mktime_tz(timetuple)
    return timestamp


def sanitize_filename(s, restricted=False, is_id=False):
    """Sanitizes a string so it could be used as part of a filename.
    If restricted is set, use a stricter subset of allowed characters.
    Set is_id if this is not an arbitrary string, but an ID that should be kept
    if possible.
    """
    def replace_insane(char):
        if restricted and char in ACCENT_CHARS:
            return ACCENT_CHARS[char]
        elif not restricted and char == '\n':
            return ' '
        elif char == '?' or ord(char) < 32 or ord(char) == 127:
            return ''
        elif char == '"':
            return '' if restricted else '\''
        elif char == ':':
            return '_-' if restricted else ' -'
        elif char in '\\/|*<>':
            return '_'
        if restricted and (char in '!&\'()[]{}$;`^,#' or char.isspace()):
            return '_'
        if restricted and ord(char) > 127:
            return '_'
        return char

    if s == '':
        return ''
    # Handle timestamps
    s = re.sub(r'[0-9]+(?::[0-9]+)+', lambda m: m.group(0).replace(':', '_'), s)
    result = ''.join(map(replace_insane, s))
    if not is_id:
        while '__' in result:
            result = result.replace('__', '_')
        result = result.strip('_')
        # Common case of "Foreign band name - English song title"
        if restricted and result.startswith('-_'):
            result = result[2:]
        if result.startswith('-'):
            result = '_' + result[len('-'):]
        result = result.lstrip('.')
        if not result:
            result = '_'
    return result

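
# Illustrative sketch (not part of the original module): how restricted mode
# affects a title used as an output filename (accents folded via ACCENT_CHARS,
# separators and unsafe characters replaced). The sample title is an assumption.
def _example_sanitize_filename():
    title = 'Épisode 1: AC/DC Live 10:30'
    return sanitize_filename(title, restricted=True)
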
5f6a1245 2275
c2934512 2276def sanitize_path(s, force=False):
a2aaf4db 2277 """Sanitizes and normalizes path on Windows"""
c2934512 2278 if sys.platform == 'win32':
c4218ac3 2279 force = False
c2934512 2280 drive_or_unc, _ = os.path.splitdrive(s)
2281 if sys.version_info < (2, 7) and not drive_or_unc:
2282 drive_or_unc, _ = os.path.splitunc(s)
2283 elif force:
2284 drive_or_unc = ''
2285 else:
a2aaf4db 2286 return s
c2934512 2287
be531ef1
S
2288 norm_path = os.path.normpath(remove_start(s, drive_or_unc)).split(os.path.sep)
2289 if drive_or_unc:
a2aaf4db
S
2290 norm_path.pop(0)
2291 sanitized_path = [
ec85ded8 2292 path_part if path_part in ['.', '..'] else re.sub(r'(?:[/<>:"\|\\?\*]|[\s.]$)', '#', path_part)
a2aaf4db 2293 for path_part in norm_path]
be531ef1
S
2294 if drive_or_unc:
2295 sanitized_path.insert(0, drive_or_unc + os.path.sep)
c4218ac3 2296 elif force and s[0] == os.path.sep:
2297 sanitized_path.insert(0, os.path.sep)
a2aaf4db
S
2298 return os.path.join(*sanitized_path)
2299
2300
17bcc626 2301def sanitize_url(url):
befa4708
S
2302 # Prepend protocol-less URLs with `http:` scheme in order to mitigate
2303 # the number of unwanted failures due to missing protocol
2304 if url.startswith('//'):
2305 return 'http:%s' % url
2306 # Fix some common typos seen so far
2307 COMMON_TYPOS = (
067aa17e 2308 # https://github.com/ytdl-org/youtube-dl/issues/15649
befa4708
S
2309 (r'^httpss://', r'https://'),
2310 # https://bx1.be/lives/direct-tv/
2311 (r'^rmtp([es]?)://', r'rtmp\1://'),
2312 )
2313 for mistake, fixup in COMMON_TYPOS:
2314 if re.match(mistake, url):
2315 return re.sub(mistake, fixup, url)
bc6b9bcd 2316 return url
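# Rough usage sketch of the normalizations above:
#   >>> sanitize_url('//example.com/video')
#   'http://example.com/video'
#   >>> sanitize_url('rmtp://example.com/live')
#   'rtmp://example.com/live'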
17bcc626
S
2317
2318
5435dcf9
HH
2319def extract_basic_auth(url):
2320 parts = compat_urlparse.urlsplit(url)
2321 if parts.username is None:
2322 return url, None
2323 url = compat_urlparse.urlunsplit(parts._replace(netloc=(
2324 parts.hostname if parts.port is None
2325 else '%s:%d' % (parts.hostname, parts.port))))
2326 auth_payload = base64.b64encode(
2327 ('%s:%s' % (parts.username, parts.password or '')).encode('utf-8'))
2328 return url, 'Basic ' + auth_payload.decode('utf-8')
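# Rough usage sketch: credentials embedded in the URL are moved into a header
# value ('user:pass' base64-encodes to 'dXNlcjpwYXNz'):
#   >>> extract_basic_auth('https://user:pass@example.com/feed')
#   ('https://example.com/feed', 'Basic dXNlcjpwYXNz')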
2329
2330
67dda517 2331def sanitized_Request(url, *args, **kwargs):
bc6b9bcd 2332 url, auth_header = extract_basic_auth(escape_url(sanitize_url(url)))
5435dcf9
HH
2333 if auth_header is not None:
2334 headers = args[1] if len(args) >= 2 else kwargs.setdefault('headers', {})
2335 headers['Authorization'] = auth_header
2336 return compat_urllib_request.Request(url, *args, **kwargs)
67dda517
S
2337
2338
51098426
S
2339def expand_path(s):
2340 """Expand shell variables and ~"""
2341 return os.path.expandvars(compat_expanduser(s))
2342
2343
d77c3dfd 2344def orderedSet(iterable):
59ae15a5
PH
2345 """ Remove all duplicates from the input iterable """
2346 res = []
2347 for el in iterable:
2348 if el not in res:
2349 res.append(el)
2350 return res
d77c3dfd 2351
912b38b4 2352
55b2f099 2353def _htmlentity_transform(entity_with_semicolon):
4e408e47 2354 """Transforms an HTML entity to a character."""
55b2f099
YCH
2355 entity = entity_with_semicolon[:-1]
2356
4e408e47
PH
2357 # Known non-numeric HTML entity
2358 if entity in compat_html_entities.name2codepoint:
2359 return compat_chr(compat_html_entities.name2codepoint[entity])
2360
55b2f099
YCH
2361 # TODO: HTML5 allows entities without a semicolon. For example,
2362 # '&Eacuteric' should be decoded as 'Éric'.
2363 if entity_with_semicolon in compat_html_entities_html5:
2364 return compat_html_entities_html5[entity_with_semicolon]
2365
91757b0f 2366 mobj = re.match(r'#(x[0-9a-fA-F]+|[0-9]+)', entity)
4e408e47
PH
2367 if mobj is not None:
2368 numstr = mobj.group(1)
28e614de 2369 if numstr.startswith('x'):
4e408e47 2370 base = 16
28e614de 2371 numstr = '0%s' % numstr
4e408e47
PH
2372 else:
2373 base = 10
067aa17e 2374 # See https://github.com/ytdl-org/youtube-dl/issues/7518
7aefc49c
S
2375 try:
2376 return compat_chr(int(numstr, base))
2377 except ValueError:
2378 pass
4e408e47
PH
2379
2380 # Unknown entity in name, return its literal representation
7a3f0c00 2381 return '&%s;' % entity
4e408e47
PH
2382
2383
d77c3dfd 2384def unescapeHTML(s):
912b38b4
PH
2385 if s is None:
2386 return None
2387 assert type(s) == compat_str
d77c3dfd 2388
4e408e47 2389 return re.sub(
95f3f7c2 2390 r'&([^&;]+;)', lambda m: _htmlentity_transform(m.group(1)), s)
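# Rough usage sketch: named and numeric entities are decoded, unknown entities
# are kept literally:
#   >>> unescapeHTML('Tom &amp; Jerry &#x27;classics&#x27;')
#   "Tom & Jerry 'classics'"
#   >>> unescapeHTML('&nosuchentity;')
#   '&nosuchentity;'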
d77c3dfd 2391
8bf48f23 2392
cdb19aa4 2393def escapeHTML(text):
2394 return (
2395 text
2396 .replace('&', '&amp;')
2397 .replace('<', '&lt;')
2398 .replace('>', '&gt;')
2399 .replace('"', '&quot;')
2400 .replace("'", '&#39;')
2401 )
2402
2403
f5b1bca9 2404def process_communicate_or_kill(p, *args, **kwargs):
2405 try:
2406 return p.communicate(*args, **kwargs)
2407 except BaseException: # Including KeyboardInterrupt
2408 p.kill()
2409 p.wait()
2410 raise
2411
2412
d3c93ec2 2413class Popen(subprocess.Popen):
2414 if sys.platform == 'win32':
2415 _startupinfo = subprocess.STARTUPINFO()
2416 _startupinfo.dwFlags |= subprocess.STARTF_USESHOWWINDOW
2417 else:
2418 _startupinfo = None
2419
2420 def __init__(self, *args, **kwargs):
2421 super(Popen, self).__init__(*args, **kwargs, startupinfo=self._startupinfo)
2422
2423 def communicate_or_kill(self, *args, **kwargs):
2424 return process_communicate_or_kill(self, *args, **kwargs)
2425
2426
aa49acd1
S
2427def get_subprocess_encoding():
2428 if sys.platform == 'win32' and sys.getwindowsversion()[0] >= 5:
2429 # For subprocess calls, encode with locale encoding
2430 # Refer to http://stackoverflow.com/a/9951851/35070
2431 encoding = preferredencoding()
2432 else:
2433 encoding = sys.getfilesystemencoding()
2434 if encoding is None:
2435 encoding = 'utf-8'
2436 return encoding
2437
2438
8bf48f23 2439def encodeFilename(s, for_subprocess=False):
59ae15a5
PH
2440 """
2441 @param s The name of the file
2442 """
d77c3dfd 2443
8bf48f23 2444 assert type(s) == compat_str
d77c3dfd 2445
59ae15a5
PH
2446 # Python 3 has a Unicode API
2447 if sys.version_info >= (3, 0):
2448 return s
0f00efed 2449
aa49acd1
S
2450 # Pass '' directly to use Unicode APIs on Windows 2000 and up
2451 # (Detecting Windows NT 4 is tricky because 'major >= 4' would
2452 # match Windows 9x series as well. Besides, NT 4 is obsolete.)
2453 if not for_subprocess and sys.platform == 'win32' and sys.getwindowsversion()[0] >= 5:
2454 return s
2455
8ee239e9
YCH
2456 # Jython assumes filenames are Unicode strings though reported as Python 2.x compatible
2457 if sys.platform.startswith('java'):
2458 return s
2459
aa49acd1
S
2460 return s.encode(get_subprocess_encoding(), 'ignore')
2461
2462
2463def decodeFilename(b, for_subprocess=False):
2464
2465 if sys.version_info >= (3, 0):
2466 return b
2467
2468 if not isinstance(b, bytes):
2469 return b
2470
2471 return b.decode(get_subprocess_encoding(), 'ignore')
8bf48f23 2472
f07b74fc
PH
2473
2474def encodeArgument(s):
2475 if not isinstance(s, compat_str):
2476 # Legacy code that uses byte strings
2477 # Uncomment the following line after fixing all post processors
7af808a5 2478 # assert False, 'Internal error: %r should be of type %r, is %r' % (s, compat_str, type(s))
f07b74fc
PH
2479 s = s.decode('ascii')
2480 return encodeFilename(s, True)
2481
2482
aa49acd1
S
2483def decodeArgument(b):
2484 return decodeFilename(b, True)
2485
2486
8271226a
PH
2487def decodeOption(optval):
2488 if optval is None:
2489 return optval
2490 if isinstance(optval, bytes):
2491 optval = optval.decode(preferredencoding())
2492
2493 assert isinstance(optval, compat_str)
2494 return optval
1c256f70 2495
5f6a1245 2496
aa7785f8 2497_timetuple = collections.namedtuple('Time', ('hours', 'minutes', 'seconds', 'milliseconds'))
2498
2499
2500def timetuple_from_msec(msec):
2501 secs, msec = divmod(msec, 1000)
2502 mins, secs = divmod(secs, 60)
2503 hrs, mins = divmod(mins, 60)
2504 return _timetuple(hrs, mins, secs, msec)
2505
2506
cdb19aa4 2507def formatSeconds(secs, delim=':', msec=False):
aa7785f8 2508 time = timetuple_from_msec(secs * 1000)
2509 if time.hours:
2510 ret = '%d%s%02d%s%02d' % (time.hours, delim, time.minutes, delim, time.seconds)
2511 elif time.minutes:
2512 ret = '%d%s%02d' % (time.minutes, delim, time.seconds)
4539dd30 2513 else:
aa7785f8 2514 ret = '%d' % time.seconds
2515 return '%s.%03d' % (ret, time.milliseconds) if msec else ret
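# Rough usage sketch: leading zero fields are dropped and milliseconds are
# optional:
#   >>> formatSeconds(3661)
#   '1:01:01'
#   >>> formatSeconds(75, msec=True)
#   '1:15.000'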
4539dd30 2516
a0ddb8a2 2517
77562778 2518def _ssl_load_windows_store_certs(ssl_context, storename):
2519 # Code adapted from _load_windows_store_certs in https://github.com/python/cpython/blob/main/Lib/ssl.py
2520 try:
2521 certs = [cert for cert, encoding, trust in ssl.enum_certificates(storename)
2522 if encoding == 'x509_asn' and (
2523 trust is True or ssl.Purpose.SERVER_AUTH.oid in trust)]
2524 except PermissionError:
2525 return
2526 for cert in certs:
a2366922 2527 try:
77562778 2528 ssl_context.load_verify_locations(cadata=cert)
2529 except ssl.SSLError:
a2366922
PH
2530 pass
2531
77562778 2532
2533def make_HTTPS_handler(params, **kwargs):
2534 opts_check_certificate = not params.get('nocheckcertificate')
2535 context = ssl.SSLContext(ssl.PROTOCOL_TLS_CLIENT)
2536 context.check_hostname = opts_check_certificate
2537 context.verify_mode = ssl.CERT_REQUIRED if opts_check_certificate else ssl.CERT_NONE
2538 if opts_check_certificate:
4e3d1898 2539 try:
2540 context.load_default_certs()
2541 # Work around the issue in load_default_certs when there are bad certificates. See:
2542 # https://github.com/yt-dlp/yt-dlp/issues/1060,
2543 # https://bugs.python.org/issue35665, https://bugs.python.org/issue45312
2544 except ssl.SSLError:
2545 # enum_certificates is not present in mingw python. See https://github.com/yt-dlp/yt-dlp/issues/1151
2546 if sys.platform == 'win32' and hasattr(ssl, 'enum_certificates'):
2547 # Create a new context to discard any certificates that were already loaded
2548 context = ssl.SSLContext(ssl.PROTOCOL_TLS_CLIENT)
2549 context.check_hostname, context.verify_mode = True, ssl.CERT_REQUIRED
2550 for storename in ('CA', 'ROOT'):
2551 _ssl_load_windows_store_certs(context, storename)
2552 context.set_default_verify_paths()
77562778 2553 return YoutubeDLHTTPSHandler(params, context=context, **kwargs)
ea6d901e 2554
732ea2f0 2555
5873d4cc 2556def bug_reports_message(before=';'):
08f2a92c 2557 if ytdl_is_updateable():
7a5c1cfe 2558 update_cmd = 'type yt-dlp -U to update'
08f2a92c 2559 else:
7a5c1cfe 2560 update_cmd = 'see https://github.com/yt-dlp/yt-dlp on how to update'
5873d4cc 2561 msg = 'please report this issue on https://github.com/yt-dlp/yt-dlp .'
08f2a92c 2562 msg += ' Make sure you are using the latest version; %s.' % update_cmd
7a5c1cfe 2563 msg += ' Be sure to call yt-dlp with the --verbose flag and include its complete output.'
5873d4cc
F
2564
2565 before = before.rstrip()
2566 if not before or before.endswith(('.', '!', '?')):
2567 msg = msg[0].title() + msg[1:]
2568
2569 return (before + ' ' if before else '') + msg
08f2a92c
JMF
2570
2571
bf5b9d85
PM
2572class YoutubeDLError(Exception):
2573 """Base exception for YoutubeDL errors."""
aa9369a2 2574 msg = None
2575
2576 def __init__(self, msg=None):
2577 if msg is not None:
2578 self.msg = msg
2579 elif self.msg is None:
2580 self.msg = type(self).__name__
2581 super().__init__(self.msg)
bf5b9d85
PM
2582
2583
3158150c 2584network_exceptions = [compat_urllib_error.URLError, compat_http_client.HTTPException, socket.error]
2585if hasattr(ssl, 'CertificateError'):
2586 network_exceptions.append(ssl.CertificateError)
2587network_exceptions = tuple(network_exceptions)
2588
2589
bf5b9d85 2590class ExtractorError(YoutubeDLError):
1c256f70 2591 """Error during info extraction."""
5f6a1245 2592
1151c407 2593 def __init__(self, msg, tb=None, expected=False, cause=None, video_id=None, ie=None):
9a82b238 2594 """ tb, if given, is the original traceback (so that it can be printed out).
7a5c1cfe 2595 If expected is set, this is a normal error message and most likely not a bug in yt-dlp.
9a82b238 2596 """
3158150c 2597 if sys.exc_info()[0] in network_exceptions:
9a82b238 2598 expected = True
d5979c5d 2599
526d74ec 2600 self.msg = str(msg)
1c256f70 2601 self.traceback = tb
1151c407 2602 self.expected = expected
2eabb802 2603 self.cause = cause
d11271dd 2604 self.video_id = video_id
1151c407 2605 self.ie = ie
2606 self.exc_info = sys.exc_info() # preserve original exception
2607
2608 super(ExtractorError, self).__init__(''.join((
2609 format_field(ie, template='[%s] '),
2610 format_field(video_id, template='%s: '),
526d74ec 2611 self.msg,
1151c407 2612 format_field(cause, template=' (caused by %r)'),
2613 '' if expected else bug_reports_message())))
1c256f70 2614
01951dda
PH
2615 def format_traceback(self):
2616 if self.traceback is None:
2617 return None
28e614de 2618 return ''.join(traceback.format_tb(self.traceback))
01951dda 2619
1c256f70 2620
416c7fcb
PH
2621class UnsupportedError(ExtractorError):
2622 def __init__(self, url):
2623 super(UnsupportedError, self).__init__(
2624 'Unsupported URL: %s' % url, expected=True)
2625 self.url = url
2626
2627
55b3e45b
JMF
2628class RegexNotFoundError(ExtractorError):
2629 """Error when a regex didn't match"""
2630 pass
2631
2632
773f291d
S
2633class GeoRestrictedError(ExtractorError):
2634 """Geographic restriction Error exception.
2635
2636 This exception may be thrown when a video is not available from your
2637 geographic location due to geographic restrictions imposed by a website.
2638 """
b6e0c7d2 2639
0db3bae8 2640 def __init__(self, msg, countries=None, **kwargs):
2641 kwargs['expected'] = True
2642 super(GeoRestrictedError, self).__init__(msg, **kwargs)
773f291d
S
2643 self.countries = countries
2644
2645
bf5b9d85 2646class DownloadError(YoutubeDLError):
59ae15a5 2647 """Download Error exception.
d77c3dfd 2648
59ae15a5
PH
2649 This exception may be thrown by FileDownloader objects if they are not
2650 configured to continue on errors. They will contain the appropriate
2651 error message.
2652 """
5f6a1245 2653
8cc83b8d
FV
2654 def __init__(self, msg, exc_info=None):
2655 """ exc_info, if given, is the original exception that caused the trouble (as returned by sys.exc_info()). """
2656 super(DownloadError, self).__init__(msg)
2657 self.exc_info = exc_info
d77c3dfd
FV
2658
2659
498f5606 2660class EntryNotInPlaylist(YoutubeDLError):
2661 """Entry not in playlist exception.
2662
2663 This exception will be thrown by YoutubeDL when a requested entry
2664 is not found in the playlist info_dict
2665 """
aa9369a2 2666 msg = 'Entry not found in info'
498f5606 2667
2668
bf5b9d85 2669class SameFileError(YoutubeDLError):
59ae15a5 2670 """Same File exception.
d77c3dfd 2671
59ae15a5
PH
2672 This exception will be thrown by FileDownloader objects if they detect
2673 multiple files would have to be downloaded to the same file on disk.
2674 """
aa9369a2 2675 msg = 'Fixed output name but more than one file to download'
2676
2677 def __init__(self, filename=None):
2678 if filename is not None:
2679 self.msg += f': {filename}'
2680 super().__init__(self.msg)
d77c3dfd
FV
2681
2682
bf5b9d85 2683class PostProcessingError(YoutubeDLError):
59ae15a5 2684 """Post Processing exception.
d77c3dfd 2685
59ae15a5
PH
2686 This exception may be raised by PostProcessor's .run() method to
2687 indicate an error in the postprocessing task.
2688 """
5f6a1245 2689
5f6a1245 2690
48f79687 2691class DownloadCancelled(YoutubeDLError):
2692 """ Exception raised when the download queue should be interrupted """
2693 msg = 'The download was cancelled'
8b0d7497 2694
8b0d7497 2695
48f79687 2696class ExistingVideoReached(DownloadCancelled):
2697 """ --break-on-existing triggered """
2698 msg = 'Encountered a video that is already in the archive, stopping due to --break-on-existing'
8b0d7497 2699
48f79687 2700
2701class RejectedVideoReached(DownloadCancelled):
2702 """ --break-on-reject triggered """
2703 msg = 'Encountered a video that did not match filter, stopping due to --break-on-reject'
51d9739f 2704
2705
48f79687 2706class MaxDownloadsReached(DownloadCancelled):
59ae15a5 2707 """ --max-downloads limit has been reached. """
48f79687 2708 msg = 'Maximum number of downloads reached, stopping due to --max-downloads'
2709
2710
f2ebc5c7 2711class ReExtractInfo(YoutubeDLError):
2712 """ Video info needs to be re-extracted. """
2713
2714 def __init__(self, msg, expected=False):
2715 super().__init__(msg)
2716 self.expected = expected
2717
2718
2719class ThrottledDownload(ReExtractInfo):
48f79687 2720 """ Download speed below --throttled-rate. """
aa9369a2 2721 msg = 'The download speed is below throttle limit'
d77c3dfd 2722
43b22906 2723 def __init__(self):
2724 super().__init__(self.msg, expected=False)
f2ebc5c7 2725
d77c3dfd 2726
bf5b9d85 2727class UnavailableVideoError(YoutubeDLError):
59ae15a5 2728 """Unavailable Format exception.
d77c3dfd 2729
59ae15a5
PH
2730 This exception will be thrown when a video is requested
2731 in a format that is not available for that video.
2732 """
aa9369a2 2733 msg = 'Unable to download video'
2734
2735 def __init__(self, err=None):
2736 if err is not None:
2737 self.msg += f': {err}'
2738 super().__init__(self.msg)
d77c3dfd
FV
2739
2740
bf5b9d85 2741class ContentTooShortError(YoutubeDLError):
59ae15a5 2742 """Content Too Short exception.
d77c3dfd 2743
59ae15a5
PH
2744 This exception may be raised by FileDownloader objects when a file they
2745 download is too small for what the server announced first, indicating
2746 the connection was probably interrupted.
2747 """
d77c3dfd 2748
59ae15a5 2749 def __init__(self, downloaded, expected):
bf5b9d85
PM
2750 super(ContentTooShortError, self).__init__(
2751 'Downloaded {0} bytes, expected {1} bytes'.format(downloaded, expected)
2752 )
2c7ed247 2753 # Both in bytes
59ae15a5
PH
2754 self.downloaded = downloaded
2755 self.expected = expected
d77c3dfd 2756
5f6a1245 2757
bf5b9d85 2758class XAttrMetadataError(YoutubeDLError):
efa97bdc
YCH
2759 def __init__(self, code=None, msg='Unknown error'):
2760 super(XAttrMetadataError, self).__init__(msg)
2761 self.code = code
bd264412 2762 self.msg = msg
efa97bdc
YCH
2763
2764 # Parsing code and msg
3089bc74 2765 if (self.code in (errno.ENOSPC, errno.EDQUOT)
a0566bbf 2766 or 'No space left' in self.msg or 'Disk quota exceeded' in self.msg):
efa97bdc
YCH
2767 self.reason = 'NO_SPACE'
2768 elif self.code == errno.E2BIG or 'Argument list too long' in self.msg:
2769 self.reason = 'VALUE_TOO_LONG'
2770 else:
2771 self.reason = 'NOT_SUPPORTED'
2772
2773
bf5b9d85 2774class XAttrUnavailableError(YoutubeDLError):
efa97bdc
YCH
2775 pass
2776
2777
c5a59d93 2778def _create_http_connection(ydl_handler, http_class, is_https, *args, **kwargs):
e5e78797
S
2779 # Working around python 2 bug (see http://bugs.python.org/issue17849) by limiting
2780 # expected HTTP responses to meet HTTP/1.0 or later (see also
067aa17e 2781 # https://github.com/ytdl-org/youtube-dl/issues/6727)
e5e78797 2782 if sys.version_info < (3, 0):
65220c3b
S
2783 kwargs['strict'] = True
2784 hc = http_class(*args, **compat_kwargs(kwargs))
be4a824d 2785 source_address = ydl_handler._params.get('source_address')
8959018a 2786
be4a824d 2787 if source_address is not None:
8959018a
AU
2788 # This is to work around _create_connection() from socket, where it will try all
2789 # address data from getaddrinfo() including IPv6. This filters the result from
2790 # getaddrinfo() based on the source_address value.
2791 # This is based on the cpython socket.create_connection() function.
2792 # https://github.com/python/cpython/blob/master/Lib/socket.py#L691
2793 def _create_connection(address, timeout=socket._GLOBAL_DEFAULT_TIMEOUT, source_address=None):
2794 host, port = address
2795 err = None
2796 addrs = socket.getaddrinfo(host, port, 0, socket.SOCK_STREAM)
9e21e6d9
S
2797 af = socket.AF_INET if '.' in source_address[0] else socket.AF_INET6
2798 ip_addrs = [addr for addr in addrs if addr[0] == af]
2799 if addrs and not ip_addrs:
2800 ip_version = 'v4' if af == socket.AF_INET else 'v6'
2801 raise socket.error(
2802 "No remote IP%s addresses available for connect, can't use '%s' as source address"
2803 % (ip_version, source_address[0]))
8959018a
AU
2804 for res in ip_addrs:
2805 af, socktype, proto, canonname, sa = res
2806 sock = None
2807 try:
2808 sock = socket.socket(af, socktype, proto)
2809 if timeout is not socket._GLOBAL_DEFAULT_TIMEOUT:
2810 sock.settimeout(timeout)
2811 sock.bind(source_address)
2812 sock.connect(sa)
2813 err = None # Explicitly break reference cycle
2814 return sock
2815 except socket.error as _:
2816 err = _
2817 if sock is not None:
2818 sock.close()
2819 if err is not None:
2820 raise err
2821 else:
9e21e6d9
S
2822 raise socket.error('getaddrinfo returns an empty list')
2823 if hasattr(hc, '_create_connection'):
2824 hc._create_connection = _create_connection
be4a824d
PH
2825 sa = (source_address, 0)
2826 if hasattr(hc, 'source_address'): # Python 2.7+
2827 hc.source_address = sa
2828 else: # Python 2.6
2829 def _hc_connect(self, *args, **kwargs):
9e21e6d9 2830 sock = _create_connection(
be4a824d
PH
2831 (self.host, self.port), self.timeout, sa)
2832 if is_https:
d7932313
PH
2833 self.sock = ssl.wrap_socket(
2834 sock, self.key_file, self.cert_file,
2835 ssl_version=ssl.PROTOCOL_TLSv1)
be4a824d
PH
2836 else:
2837 self.sock = sock
2838 hc.connect = functools.partial(_hc_connect, hc)
2839
2840 return hc
2841
2842
87f0e62d 2843def handle_youtubedl_headers(headers):
992fc9d6
YCH
2844 filtered_headers = headers
2845
2846 if 'Youtubedl-no-compression' in filtered_headers:
2847 filtered_headers = dict((k, v) for k, v in filtered_headers.items() if k.lower() != 'accept-encoding')
87f0e62d 2848 del filtered_headers['Youtubedl-no-compression']
87f0e62d 2849
992fc9d6 2850 return filtered_headers
87f0e62d
YCH
2851
2852
acebc9cd 2853class YoutubeDLHandler(compat_urllib_request.HTTPHandler):
59ae15a5
PH
2854 """Handler for HTTP requests and responses.
2855
2856 This class, when installed with an OpenerDirector, automatically adds
2857 the standard headers to every HTTP request and handles gzipped and
2858 deflated responses from web servers. If compression is to be avoided in
2859 a particular request, the original request in the program code only has
0424ec30 2860 to include the HTTP header "Youtubedl-no-compression", which will be
59ae15a5
PH
2861 removed before making the real request.
2862
2863 Part of this code was copied from:
2864
2865 http://techknack.net/python-urllib2-handlers/
2866
2867 Andrew Rowls, the author of that code, agreed to release it to the
2868 public domain.
2869 """
2870
be4a824d
PH
2871 def __init__(self, params, *args, **kwargs):
2872 compat_urllib_request.HTTPHandler.__init__(self, *args, **kwargs)
2873 self._params = params
2874
2875 def http_open(self, req):
71aff188
YCH
2876 conn_class = compat_http_client.HTTPConnection
2877
2878 socks_proxy = req.headers.get('Ytdl-socks-proxy')
2879 if socks_proxy:
2880 conn_class = make_socks_conn_class(conn_class, socks_proxy)
2881 del req.headers['Ytdl-socks-proxy']
2882
be4a824d 2883 return self.do_open(functools.partial(
71aff188 2884 _create_http_connection, self, conn_class, False),
be4a824d
PH
2885 req)
2886
59ae15a5
PH
2887 @staticmethod
2888 def deflate(data):
fc2119f2 2889 if not data:
2890 return data
59ae15a5
PH
2891 try:
2892 return zlib.decompress(data, -zlib.MAX_WBITS)
2893 except zlib.error:
2894 return zlib.decompress(data)
2895
acebc9cd 2896 def http_request(self, req):
51f267d9
S
2897 # According to RFC 3986, URLs cannot contain non-ASCII characters; however, this is not
2898 # always respected by websites - some tend to give out URLs with non-percent-encoded
2899 # non-ASCII characters (see telemb.py, ard.py [#3412])
2900 # urllib chokes on URLs with non-ASCII characters (see http://bugs.python.org/issue3991)
2901 # To work around aforementioned issue we will replace request's original URL with
2902 # percent-encoded one
2903 # Since redirects are also affected (e.g. http://www.southpark.de/alle-episoden/s18e09)
2904 # the code of this workaround has been moved here from YoutubeDL.urlopen()
2905 url = req.get_full_url()
2906 url_escaped = escape_url(url)
2907
2908 # Substitute URL if any change after escaping
2909 if url != url_escaped:
15d260eb 2910 req = update_Request(req, url=url_escaped)
51f267d9 2911
33ac271b 2912 for h, v in std_headers.items():
3d5f7a39
JK
2913 # Capitalize is needed because of Python bug 2275: http://bugs.python.org/issue2275
2914 # The dict keys are capitalized because of this bug by urllib
2915 if h.capitalize() not in req.headers:
33ac271b 2916 req.add_header(h, v)
87f0e62d
YCH
2917
2918 req.headers = handle_youtubedl_headers(req.headers)
989b4b2b
PH
2919
2920 if sys.version_info < (2, 7) and '#' in req.get_full_url():
2921 # Python 2.6 is brain-dead when it comes to fragments
2922 req._Request__original = req._Request__original.partition('#')[0]
2923 req._Request__r_type = req._Request__r_type.partition('#')[0]
2924
59ae15a5
PH
2925 return req
2926
acebc9cd 2927 def http_response(self, req, resp):
59ae15a5
PH
2928 old_resp = resp
2929 # gzip
2930 if resp.headers.get('Content-encoding', '') == 'gzip':
aa3e9507
PH
2931 content = resp.read()
2932 gz = gzip.GzipFile(fileobj=io.BytesIO(content), mode='rb')
2933 try:
2934 uncompressed = io.BytesIO(gz.read())
2935 except IOError as original_ioerror:
2936 # There may be junk at the end of the file
2937 # See http://stackoverflow.com/q/4928560/35070 for details
2938 for i in range(1, 1024):
2939 try:
2940 gz = gzip.GzipFile(fileobj=io.BytesIO(content[:-i]), mode='rb')
2941 uncompressed = io.BytesIO(gz.read())
2942 except IOError:
2943 continue
2944 break
2945 else:
2946 raise original_ioerror
b407d853 2947 resp = compat_urllib_request.addinfourl(uncompressed, old_resp.headers, old_resp.url, old_resp.code)
59ae15a5 2948 resp.msg = old_resp.msg
c047270c 2949 del resp.headers['Content-encoding']
59ae15a5
PH
2950 # deflate
2951 if resp.headers.get('Content-encoding', '') == 'deflate':
2952 gz = io.BytesIO(self.deflate(resp.read()))
b407d853 2953 resp = compat_urllib_request.addinfourl(gz, old_resp.headers, old_resp.url, old_resp.code)
59ae15a5 2954 resp.msg = old_resp.msg
c047270c 2955 del resp.headers['Content-encoding']
ad729172 2956 # Percent-encode redirect URL of Location HTTP header to satisfy RFC 3986 (see
067aa17e 2957 # https://github.com/ytdl-org/youtube-dl/issues/6457).
5a4d9ddb
S
2958 if 300 <= resp.code < 400:
2959 location = resp.headers.get('Location')
2960 if location:
2961 # As of RFC 2616 default charset is iso-8859-1 that is respected by python 3
2962 if sys.version_info >= (3, 0):
2963 location = location.encode('iso-8859-1').decode('utf-8')
0ea59007
YCH
2964 else:
2965 location = location.decode('utf-8')
5a4d9ddb
S
2966 location_escaped = escape_url(location)
2967 if location != location_escaped:
2968 del resp.headers['Location']
9a4aec8b
YCH
2969 if sys.version_info < (3, 0):
2970 location_escaped = location_escaped.encode('utf-8')
5a4d9ddb 2971 resp.headers['Location'] = location_escaped
59ae15a5 2972 return resp
0f8d03f8 2973
acebc9cd
PH
2974 https_request = http_request
2975 https_response = http_response
bf50b038 2976
5de90176 2977
71aff188
YCH
2978def make_socks_conn_class(base_class, socks_proxy):
2979 assert issubclass(base_class, (
2980 compat_http_client.HTTPConnection, compat_http_client.HTTPSConnection))
2981
2982 url_components = compat_urlparse.urlparse(socks_proxy)
2983 if url_components.scheme.lower() == 'socks5':
2984 socks_type = ProxyType.SOCKS5
2985 elif url_components.scheme.lower() in ('socks', 'socks4'):
2986 socks_type = ProxyType.SOCKS4
51fb4995
YCH
2987 elif url_components.scheme.lower() == 'socks4a':
2988 socks_type = ProxyType.SOCKS4A
71aff188 2989
cdd94c2e
YCH
2990 def unquote_if_non_empty(s):
2991 if not s:
2992 return s
2993 return compat_urllib_parse_unquote_plus(s)
2994
71aff188
YCH
2995 proxy_args = (
2996 socks_type,
2997 url_components.hostname, url_components.port or 1080,
2998 True, # Remote DNS
cdd94c2e
YCH
2999 unquote_if_non_empty(url_components.username),
3000 unquote_if_non_empty(url_components.password),
71aff188
YCH
3001 )
3002
3003 class SocksConnection(base_class):
3004 def connect(self):
3005 self.sock = sockssocket()
3006 self.sock.setproxy(*proxy_args)
3007 if type(self.timeout) in (int, float):
3008 self.sock.settimeout(self.timeout)
3009 self.sock.connect((self.host, self.port))
3010
3011 if isinstance(self, compat_http_client.HTTPSConnection):
3012 if hasattr(self, '_context'): # Python > 2.6
3013 self.sock = self._context.wrap_socket(
3014 self.sock, server_hostname=self.host)
3015 else:
3016 self.sock = ssl.wrap_socket(self.sock)
3017
3018 return SocksConnection
3019
3020
be4a824d
PH
3021class YoutubeDLHTTPSHandler(compat_urllib_request.HTTPSHandler):
3022 def __init__(self, params, https_conn_class=None, *args, **kwargs):
3023 compat_urllib_request.HTTPSHandler.__init__(self, *args, **kwargs)
3024 self._https_conn_class = https_conn_class or compat_http_client.HTTPSConnection
3025 self._params = params
3026
3027 def https_open(self, req):
4f264c02 3028 kwargs = {}
71aff188
YCH
3029 conn_class = self._https_conn_class
3030
4f264c02
JMF
3031 if hasattr(self, '_context'): # python > 2.6
3032 kwargs['context'] = self._context
3033 if hasattr(self, '_check_hostname'): # python 3.x
3034 kwargs['check_hostname'] = self._check_hostname
71aff188
YCH
3035
3036 socks_proxy = req.headers.get('Ytdl-socks-proxy')
3037 if socks_proxy:
3038 conn_class = make_socks_conn_class(conn_class, socks_proxy)
3039 del req.headers['Ytdl-socks-proxy']
3040
be4a824d 3041 return self.do_open(functools.partial(
71aff188 3042 _create_http_connection, self, conn_class, True),
4f264c02 3043 req, **kwargs)
be4a824d
PH
3044
3045
1bab3437 3046class YoutubeDLCookieJar(compat_cookiejar.MozillaCookieJar):
f1a8511f
S
3047 """
3048 See [1] for cookie file format.
3049
3050 1. https://curl.haxx.se/docs/http-cookies.html
3051 """
e7e62441 3052 _HTTPONLY_PREFIX = '#HttpOnly_'
c380cc28
S
3053 _ENTRY_LEN = 7
3054 _HEADER = '''# Netscape HTTP Cookie File
7a5c1cfe 3055# This file is generated by yt-dlp. Do not edit.
c380cc28
S
3056
3057'''
3058 _CookieFileEntry = collections.namedtuple(
3059 'CookieFileEntry',
3060 ('domain_name', 'include_subdomains', 'path', 'https_only', 'expires_at', 'name', 'value'))
e7e62441 3061
1bab3437 3062 def save(self, filename=None, ignore_discard=False, ignore_expires=False):
c380cc28
S
3063 """
3064 Save cookies to a file.
3065
3066 Most of the code is taken from CPython 3.8 and slightly adapted
3067 to support cookie files with UTF-8 in both python 2 and 3.
3068 """
3069 if filename is None:
3070 if self.filename is not None:
3071 filename = self.filename
3072 else:
3073 raise ValueError(compat_cookiejar.MISSING_FILENAME_TEXT)
3074
1bab3437
S
3075 # Store session cookies with `expires` set to 0 instead of an empty
3076 # string
3077 for cookie in self:
3078 if cookie.expires is None:
3079 cookie.expires = 0
c380cc28
S
3080
3081 with io.open(filename, 'w', encoding='utf-8') as f:
3082 f.write(self._HEADER)
3083 now = time.time()
3084 for cookie in self:
3085 if not ignore_discard and cookie.discard:
3086 continue
3087 if not ignore_expires and cookie.is_expired(now):
3088 continue
3089 if cookie.secure:
3090 secure = 'TRUE'
3091 else:
3092 secure = 'FALSE'
3093 if cookie.domain.startswith('.'):
3094 initial_dot = 'TRUE'
3095 else:
3096 initial_dot = 'FALSE'
3097 if cookie.expires is not None:
3098 expires = compat_str(cookie.expires)
3099 else:
3100 expires = ''
3101 if cookie.value is None:
3102 # cookies.txt regards 'Set-Cookie: foo' as a cookie
3103 # with no name, whereas http.cookiejar regards it as a
3104 # cookie with no value.
3105 name = ''
3106 value = cookie.name
3107 else:
3108 name = cookie.name
3109 value = cookie.value
3110 f.write(
3111 '\t'.join([cookie.domain, initial_dot, cookie.path,
3112 secure, expires, name, value]) + '\n')
1bab3437
S
3113
3114 def load(self, filename=None, ignore_discard=False, ignore_expires=False):
e7e62441 3115 """Load cookies from a file."""
3116 if filename is None:
3117 if self.filename is not None:
3118 filename = self.filename
3119 else:
3120 raise ValueError(compat_cookiejar.MISSING_FILENAME_TEXT)
3121
c380cc28
S
3122 def prepare_line(line):
3123 if line.startswith(self._HTTPONLY_PREFIX):
3124 line = line[len(self._HTTPONLY_PREFIX):]
3125 # comments and empty lines are fine
3126 if line.startswith('#') or not line.strip():
3127 return line
3128 cookie_list = line.split('\t')
3129 if len(cookie_list) != self._ENTRY_LEN:
3130 raise compat_cookiejar.LoadError('invalid length %d' % len(cookie_list))
3131 cookie = self._CookieFileEntry(*cookie_list)
3132 if cookie.expires_at and not cookie.expires_at.isdigit():
3133 raise compat_cookiejar.LoadError('invalid expires at %s' % cookie.expires_at)
3134 return line
3135
e7e62441 3136 cf = io.StringIO()
c380cc28 3137 with io.open(filename, encoding='utf-8') as f:
e7e62441 3138 for line in f:
c380cc28
S
3139 try:
3140 cf.write(prepare_line(line))
3141 except compat_cookiejar.LoadError as e:
3142 write_string(
3143 'WARNING: skipping cookie file entry due to %s: %r\n'
3144 % (e, line), sys.stderr)
3145 continue
e7e62441 3146 cf.seek(0)
3147 self._really_load(cf, filename, ignore_discard, ignore_expires)
1bab3437
S
3148 # Session cookies are denoted by either `expires` field set to
3149 # an empty string or 0. MozillaCookieJar only recognizes the former
3150 # (see [1]). So we need to force the latter to be recognized as session
3151 # cookies on our own.
3152 # Session cookies may be important for cookies-based authentication,
3153 # e.g. usually, when a user does not check the 'Remember me' check box while
3154 # logging in on a site, some important cookies are stored as session
3155 # cookies, so failing to recognize them will result in a failed login.
3156 # 1. https://bugs.python.org/issue17164
3157 for cookie in self:
3158 # Treat `expires=0` cookies as session cookies
3159 if cookie.expires == 0:
3160 cookie.expires = None
3161 cookie.discard = True
3162
3163
a6420bf5
S
3164class YoutubeDLCookieProcessor(compat_urllib_request.HTTPCookieProcessor):
3165 def __init__(self, cookiejar=None):
3166 compat_urllib_request.HTTPCookieProcessor.__init__(self, cookiejar)
3167
3168 def http_response(self, request, response):
3169 # Python 2 will choke on next HTTP request in row if there are non-ASCII
3170 # characters in Set-Cookie HTTP header of last response (see
067aa17e 3171 # https://github.com/ytdl-org/youtube-dl/issues/6769).
a6420bf5
S
3172 # In order to at least prevent crashing we will percent encode Set-Cookie
3173 # header before HTTPCookieProcessor starts processing it.
e28034c5
S
3174 # if sys.version_info < (3, 0) and response.headers:
3175 # for set_cookie_header in ('Set-Cookie', 'Set-Cookie2'):
3176 # set_cookie = response.headers.get(set_cookie_header)
3177 # if set_cookie:
3178 # set_cookie_escaped = compat_urllib_parse.quote(set_cookie, b"%/;:@&=+$,!~*'()?#[] ")
3179 # if set_cookie != set_cookie_escaped:
3180 # del response.headers[set_cookie_header]
3181 # response.headers[set_cookie_header] = set_cookie_escaped
a6420bf5
S
3182 return compat_urllib_request.HTTPCookieProcessor.http_response(self, request, response)
3183
f5fa042c 3184 https_request = compat_urllib_request.HTTPCookieProcessor.http_request
a6420bf5
S
3185 https_response = http_response
3186
3187
fca6dba8 3188class YoutubeDLRedirectHandler(compat_urllib_request.HTTPRedirectHandler):
201c1459 3189 """YoutubeDL redirect handler
3190
3191 The code is based on HTTPRedirectHandler implementation from CPython [1].
3192
3193 This redirect handler solves two issues:
3194 - ensures redirect URL is always unicode under python 2
3195 - introduces support for experimental HTTP response status code
3196 308 Permanent Redirect [2] used by some sites [3]
3197
3198 1. https://github.com/python/cpython/blob/master/Lib/urllib/request.py
3199 2. https://developer.mozilla.org/en-US/docs/Web/HTTP/Status/308
3200 3. https://github.com/ytdl-org/youtube-dl/issues/28768
3201 """
3202
3203 http_error_301 = http_error_303 = http_error_307 = http_error_308 = compat_urllib_request.HTTPRedirectHandler.http_error_302
3204
3205 def redirect_request(self, req, fp, code, msg, headers, newurl):
3206 """Return a Request or None in response to a redirect.
3207
3208 This is called by the http_error_30x methods when a
3209 redirection response is received. If a redirection should
3210 take place, return a new Request to allow http_error_30x to
3211 perform the redirect. Otherwise, raise HTTPError if no-one
3212 else should try to handle this url. Return None if you can't
3213 but another Handler might.
3214 """
3215 m = req.get_method()
3216 if (not (code in (301, 302, 303, 307, 308) and m in ("GET", "HEAD")
3217 or code in (301, 302, 303) and m == "POST")):
3218 raise compat_HTTPError(req.full_url, code, msg, headers, fp)
3219 # Strictly (according to RFC 2616), 301 or 302 in response to
3220 # a POST MUST NOT cause a redirection without confirmation
3221 # from the user (of urllib.request, in this case). In practice,
3222 # essentially all clients do redirect in this case, so we do
3223 # the same.
3224
3225 # On python 2 urlh.geturl() may sometimes return redirect URL
3226 # as byte string instead of unicode. This workaround allows
3227 # to force it always return unicode.
3228 if sys.version_info[0] < 3:
3229 newurl = compat_str(newurl)
3230
3231 # Be conciliant with URIs containing a space. This is mainly
3232 # redundant with the more complete encoding done in http_error_302(),
3233 # but it is kept for compatibility with other callers.
3234 newurl = newurl.replace(' ', '%20')
3235
3236 CONTENT_HEADERS = ("content-length", "content-type")
3237 # NB: don't use dict comprehension for python 2.6 compatibility
3238 newheaders = dict((k, v) for k, v in req.headers.items()
3239 if k.lower() not in CONTENT_HEADERS)
3240 return compat_urllib_request.Request(
3241 newurl, headers=newheaders, origin_req_host=req.origin_req_host,
3242 unverifiable=True)
fca6dba8
S
3243
3244
46f59e89
S
3245def extract_timezone(date_str):
3246 m = re.search(
f137e4c2 3247 r'''(?x)
3248 ^.{8,}? # >=8 char non-TZ prefix, if present
3249 (?P<tz>Z| # just the UTC Z, or
3250 (?:(?<=.\b\d{4}|\b\d{2}:\d\d)| # preceded by 4 digits or hh:mm or
3251 (?<!.\b[a-zA-Z]{3}|[a-zA-Z]{4}|..\b\d\d)) # not preceded by 3 alpha word or >= 4 alpha or 2 digits
3252 [ ]? # optional space
3253 (?P<sign>\+|-) # +/-
3254 (?P<hours>[0-9]{2}):?(?P<minutes>[0-9]{2}) # hh[:]mm
3255 $)
3256 ''', date_str)
46f59e89
S
3257 if not m:
3258 timezone = datetime.timedelta()
3259 else:
3260 date_str = date_str[:-len(m.group('tz'))]
3261 if not m.group('sign'):
3262 timezone = datetime.timedelta()
3263 else:
3264 sign = 1 if m.group('sign') == '+' else -1
3265 timezone = datetime.timedelta(
3266 hours=sign * int(m.group('hours')),
3267 minutes=sign * int(m.group('minutes')))
3268 return timezone, date_str
3269
3270
08b38d54 3271def parse_iso8601(date_str, delimiter='T', timezone=None):
912b38b4
PH
3272 """ Return a UNIX timestamp from the given date """
3273
3274 if date_str is None:
3275 return None
3276
52c3a6e4
S
3277 date_str = re.sub(r'\.[0-9]+', '', date_str)
3278
08b38d54 3279 if timezone is None:
46f59e89
S
3280 timezone, date_str = extract_timezone(date_str)
3281
52c3a6e4
S
3282 try:
3283 date_format = '%Y-%m-%d{0}%H:%M:%S'.format(delimiter)
3284 dt = datetime.datetime.strptime(date_str, date_format) - timezone
3285 return calendar.timegm(dt.timetuple())
3286 except ValueError:
3287 pass
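# Rough usage sketch (timestamps derived from the logic above, so treat them as
# approximate):
#   >>> parse_iso8601('2014-03-23T22:04:26Z')
#   1395612266
#   >>> parse_iso8601('2014-03-23T23:04:26+0100')   # same instant
#   1395612266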
912b38b4
PH
3288
3289
46f59e89
S
3290def date_formats(day_first=True):
3291 return DATE_FORMATS_DAY_FIRST if day_first else DATE_FORMATS_MONTH_FIRST
3292
3293
42bdd9d0 3294def unified_strdate(date_str, day_first=True):
bf50b038 3295 """Return a string with the date in the format YYYYMMDD"""
64e7ad60
PH
3296
3297 if date_str is None:
3298 return None
bf50b038 3299 upload_date = None
5f6a1245 3300 # Replace commas
026fcc04 3301 date_str = date_str.replace(',', ' ')
42bdd9d0 3302 # Remove AM/PM + timezone
9bb8e0a3 3303 date_str = re.sub(r'(?i)\s*(?:AM|PM)(?:\s+[A-Z]+)?', '', date_str)
46f59e89 3304 _, date_str = extract_timezone(date_str)
42bdd9d0 3305
46f59e89 3306 for expression in date_formats(day_first):
bf50b038
JMF
3307 try:
3308 upload_date = datetime.datetime.strptime(date_str, expression).strftime('%Y%m%d')
5de90176 3309 except ValueError:
bf50b038 3310 pass
42393ce2
PH
3311 if upload_date is None:
3312 timetuple = email.utils.parsedate_tz(date_str)
3313 if timetuple:
c6b9cf05
S
3314 try:
3315 upload_date = datetime.datetime(*timetuple[:6]).strftime('%Y%m%d')
3316 except ValueError:
3317 pass
6a750402
JMF
3318 if upload_date is not None:
3319 return compat_str(upload_date)
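# Rough usage sketch, assuming DATE_FORMATS (defined earlier in this file)
# includes a '%B %d %Y' pattern:
#   >>> unified_strdate('December 21, 2010')
#   '20101221'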
bf50b038 3320
5f6a1245 3321
46f59e89
S
3322def unified_timestamp(date_str, day_first=True):
3323 if date_str is None:
3324 return None
3325
2ae2ffda 3326 date_str = re.sub(r'[,|]', '', date_str)
46f59e89 3327
7dc2a74e 3328 pm_delta = 12 if re.search(r'(?i)PM', date_str) else 0
46f59e89
S
3329 timezone, date_str = extract_timezone(date_str)
3330
3331 # Remove AM/PM + timezone
3332 date_str = re.sub(r'(?i)\s*(?:AM|PM)(?:\s+[A-Z]+)?', '', date_str)
3333
deef3195
S
3334 # Remove unrecognized timezones from ISO 8601 alike timestamps
3335 m = re.search(r'\d{1,2}:\d{1,2}(?:\.\d+)?(?P<tz>\s*[A-Z]+)$', date_str)
3336 if m:
3337 date_str = date_str[:-len(m.group('tz'))]
3338
f226880c
PH
3339 # Python only supports microseconds, so remove nanoseconds
3340 m = re.search(r'^([0-9]{4,}-[0-9]{1,2}-[0-9]{1,2}T[0-9]{1,2}:[0-9]{1,2}:[0-9]{1,2}\.[0-9]{6})[0-9]+$', date_str)
3341 if m:
3342 date_str = m.group(1)
3343
46f59e89
S
3344 for expression in date_formats(day_first):
3345 try:
7dc2a74e 3346 dt = datetime.datetime.strptime(date_str, expression) - timezone + datetime.timedelta(hours=pm_delta)
46f59e89
S
3347 return calendar.timegm(dt.timetuple())
3348 except ValueError:
3349 pass
3350 timetuple = email.utils.parsedate_tz(date_str)
3351 if timetuple:
7dc2a74e 3352 return calendar.timegm(timetuple) + pm_delta * 3600
46f59e89
S
3353
3354
28e614de 3355def determine_ext(url, default_ext='unknown_video'):
85750f89 3356 if url is None or '.' not in url:
f4776371 3357 return default_ext
9cb9a5df 3358 guess = url.partition('?')[0].rpartition('.')[2]
73e79f2a
PH
3359 if re.match(r'^[A-Za-z0-9]+$', guess):
3360 return guess
a7aaa398
S
3361 # Try to extract ext from URLs like http://example.com/foo/bar.mp4/?download
3362 elif guess.rstrip('/') in KNOWN_EXTENSIONS:
9cb9a5df 3363 return guess.rstrip('/')
73e79f2a 3364 else:
cbdbb766 3365 return default_ext
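# Rough usage sketch:
#   >>> determine_ext('http://example.com/video.mp4?dl=1')
#   'mp4'
#   >>> determine_ext('http://example.com/stream')
#   'unknown_video'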
73e79f2a 3366
5f6a1245 3367
824fa511
S
3368def subtitles_filename(filename, sub_lang, sub_format, expected_real_ext=None):
3369 return replace_extension(filename, sub_lang + '.' + sub_format, expected_real_ext)
d4051a8e 3370
5f6a1245 3371
9e62f283 3372def datetime_from_str(date_str, precision='auto', format='%Y%m%d'):
37254abc
JMF
3373 """
3374 Return a datetime object from a string in the format YYYYMMDD or
9e62f283 3375 (now|today|date)[+-][0-9](microsecond|second|minute|hour|day|week|month|year)(s)?
3376
3377 format: string date format used to return datetime object from
3378 precision: round the time portion of a datetime object.
3379 auto|microsecond|second|minute|hour|day.
3380 auto: round to the unit provided in date_str (if applicable).
3381 """
3382 auto_precision = False
3383 if precision == 'auto':
3384 auto_precision = True
3385 precision = 'microsecond'
3386 today = datetime_round(datetime.datetime.now(), precision)
f8795e10 3387 if date_str in ('now', 'today'):
37254abc 3388 return today
f8795e10
PH
3389 if date_str == 'yesterday':
3390 return today - datetime.timedelta(days=1)
9e62f283 3391 match = re.match(
3392 r'(?P<start>.+)(?P<sign>[+-])(?P<time>\d+)(?P<unit>microsecond|second|minute|hour|day|week|month|year)(s)?',
3393 date_str)
37254abc 3394 if match is not None:
9e62f283 3395 start_time = datetime_from_str(match.group('start'), precision, format)
3396 time = int(match.group('time')) * (-1 if match.group('sign') == '-' else 1)
37254abc 3397 unit = match.group('unit')
9e62f283 3398 if unit == 'month' or unit == 'year':
3399 new_date = datetime_add_months(start_time, time * 12 if unit == 'year' else time)
37254abc 3400 unit = 'day'
9e62f283 3401 else:
3402 if unit == 'week':
3403 unit = 'day'
3404 time *= 7
3405 delta = datetime.timedelta(**{unit + 's': time})
3406 new_date = start_time + delta
3407 if auto_precision:
3408 return datetime_round(new_date, unit)
3409 return new_date
3410
3411 return datetime_round(datetime.datetime.strptime(date_str, format), precision)
3412
3413
3414def date_from_str(date_str, format='%Y%m%d'):
3415 """
3416 Return a datetime object from a string in the format YYYYMMDD or
3417 (now|today|date)[+-][0-9](microsecond|second|minute|hour|day|week|month|year)(s)?
3418
3419 format: string date format used to return datetime object from
3420 """
3421 return datetime_from_str(date_str, precision='microsecond', format=format).date()
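# Rough usage sketch: both absolute and relative dates are accepted:
#   >>> date_from_str('20210101')    # -> datetime.date(2021, 1, 1)
#   >>> date_from_str('now-1week')   # -> the date seven days before today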
3422
3423
3424def datetime_add_months(dt, months):
3425 """Increment/Decrement a datetime object by months."""
3426 month = dt.month + months - 1
3427 year = dt.year + month // 12
3428 month = month % 12 + 1
3429 day = min(dt.day, calendar.monthrange(year, month)[1])
3430 return dt.replace(year, month, day)
3431
3432
3433def datetime_round(dt, precision='day'):
3434 """
3435 Round a datetime object's time to a specific precision
3436 """
3437 if precision == 'microsecond':
3438 return dt
3439
3440 unit_seconds = {
3441 'day': 86400,
3442 'hour': 3600,
3443 'minute': 60,
3444 'second': 1,
3445 }
3446 roundto = lambda x, n: ((x + n / 2) // n) * n
3447 timestamp = calendar.timegm(dt.timetuple())
3448 return datetime.datetime.utcfromtimestamp(roundto(timestamp, unit_seconds[precision]))
5f6a1245
JW
3449
3450
e63fc1be 3451def hyphenate_date(date_str):
3452 """
3453 Convert a date in 'YYYYMMDD' format to 'YYYY-MM-DD' format"""
3454 match = re.match(r'^(\d\d\d\d)(\d\d)(\d\d)$', date_str)
3455 if match is not None:
3456 return '-'.join(match.groups())
3457 else:
3458 return date_str
3459
5f6a1245 3460
bd558525
JMF
3461class DateRange(object):
3462 """Represents a time interval between two dates"""
5f6a1245 3463
bd558525
JMF
3464 def __init__(self, start=None, end=None):
3465 """start and end must be strings in the format accepted by date"""
3466 if start is not None:
3467 self.start = date_from_str(start)
3468 else:
3469 self.start = datetime.datetime.min.date()
3470 if end is not None:
3471 self.end = date_from_str(end)
3472 else:
3473 self.end = datetime.datetime.max.date()
37254abc 3474 if self.start > self.end:
bd558525 3475 raise ValueError('Date range: "%s" , the start date must be before the end date' % self)
5f6a1245 3476
bd558525
JMF
3477 @classmethod
3478 def day(cls, day):
3479 """Returns a range that only contains the given day"""
5f6a1245
JW
3480 return cls(day, day)
3481
bd558525
JMF
3482 def __contains__(self, date):
3483 """Check if the date is in the range"""
37254abc
JMF
3484 if not isinstance(date, datetime.date):
3485 date = date_from_str(date)
3486 return self.start <= date <= self.end
5f6a1245 3487
bd558525 3488 def __str__(self):
5f6a1245 3489 return '%s - %s' % (self.start.isoformat(), self.end.isoformat())
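# Rough usage sketch: membership works with date objects or with the same
# strings date_from_str() accepts:
#   >>> '20200103' in DateRange('20200101', '20200105')
#   True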
c496ca96
PH
3490
3491
3492def platform_name():
3493 """ Returns the platform name as a compat_str """
3494 res = platform.platform()
3495 if isinstance(res, bytes):
3496 res = res.decode(preferredencoding())
3497
3498 assert isinstance(res, compat_str)
3499 return res
c257baff
PH
3500
3501
49fa4d9a
N
3502def get_windows_version():
3503 ''' Get Windows version. None if it's not running on Windows '''
3504 if compat_os_name == 'nt':
3505 return version_tuple(platform.win32_ver()[1])
3506 else:
3507 return None
3508
3509
b58ddb32
PH
3510def _windows_write_string(s, out):
3511 """ Returns True if the string was written using special methods,
3512 False if it has yet to be written out."""
3513 # Adapted from http://stackoverflow.com/a/3259271/35070
3514
b58ddb32
PH
3515 import ctypes.wintypes
3516
3517 WIN_OUTPUT_IDS = {
3518 1: -11,
3519 2: -12,
3520 }
3521
a383a98a
PH
3522 try:
3523 fileno = out.fileno()
3524 except AttributeError:
3525 # If the output stream doesn't have a fileno, it's virtual
3526 return False
aa42e873
PH
3527 except io.UnsupportedOperation:
3528 # Some strange Windows pseudo files?
3529 return False
b58ddb32
PH
3530 if fileno not in WIN_OUTPUT_IDS:
3531 return False
3532
d7cd9a9e 3533 GetStdHandle = compat_ctypes_WINFUNCTYPE(
b58ddb32 3534 ctypes.wintypes.HANDLE, ctypes.wintypes.DWORD)(
d7cd9a9e 3535 ('GetStdHandle', ctypes.windll.kernel32))
b58ddb32
PH
3536 h = GetStdHandle(WIN_OUTPUT_IDS[fileno])
3537
d7cd9a9e 3538 WriteConsoleW = compat_ctypes_WINFUNCTYPE(
b58ddb32
PH
3539 ctypes.wintypes.BOOL, ctypes.wintypes.HANDLE, ctypes.wintypes.LPWSTR,
3540 ctypes.wintypes.DWORD, ctypes.POINTER(ctypes.wintypes.DWORD),
d7cd9a9e 3541 ctypes.wintypes.LPVOID)(('WriteConsoleW', ctypes.windll.kernel32))
b58ddb32
PH
3542 written = ctypes.wintypes.DWORD(0)
3543
d7cd9a9e 3544 GetFileType = compat_ctypes_WINFUNCTYPE(ctypes.wintypes.DWORD, ctypes.wintypes.DWORD)(('GetFileType', ctypes.windll.kernel32))
b58ddb32
PH
3545 FILE_TYPE_CHAR = 0x0002
3546 FILE_TYPE_REMOTE = 0x8000
d7cd9a9e 3547 GetConsoleMode = compat_ctypes_WINFUNCTYPE(
b58ddb32
PH
3548 ctypes.wintypes.BOOL, ctypes.wintypes.HANDLE,
3549 ctypes.POINTER(ctypes.wintypes.DWORD))(
d7cd9a9e 3550 ('GetConsoleMode', ctypes.windll.kernel32))
b58ddb32
PH
3551 INVALID_HANDLE_VALUE = ctypes.wintypes.DWORD(-1).value
3552
3553 def not_a_console(handle):
3554 if handle == INVALID_HANDLE_VALUE or handle is None:
3555 return True
3089bc74
S
3556 return ((GetFileType(handle) & ~FILE_TYPE_REMOTE) != FILE_TYPE_CHAR
3557 or GetConsoleMode(handle, ctypes.byref(ctypes.wintypes.DWORD())) == 0)
b58ddb32
PH
3558
3559 if not_a_console(h):
3560 return False
3561
d1b9c912
PH
3562 def next_nonbmp_pos(s):
3563 try:
3564 return next(i for i, c in enumerate(s) if ord(c) > 0xffff)
3565 except StopIteration:
3566 return len(s)
3567
3568 while s:
3569 count = min(next_nonbmp_pos(s), 1024)
3570
b58ddb32 3571 ret = WriteConsoleW(
d1b9c912 3572 h, s, count if count else 2, ctypes.byref(written), None)
b58ddb32
PH
3573 if ret == 0:
3574 raise OSError('Failed to write string')
d1b9c912
PH
3575 if not count: # We just wrote a non-BMP character
3576 assert written.value == 2
3577 s = s[1:]
3578 else:
3579 assert written.value > 0
3580 s = s[written.value:]
b58ddb32
PH
3581 return True
3582
3583
734f90bb 3584def write_string(s, out=None, encoding=None):
7459e3a2
PH
3585 if out is None:
3586 out = sys.stderr
8bf48f23 3587 assert type(s) == compat_str
7459e3a2 3588
b58ddb32
PH
3589 if sys.platform == 'win32' and encoding is None and hasattr(out, 'fileno'):
3590 if _windows_write_string(s, out):
3591 return
3592
3089bc74
S
3593 if ('b' in getattr(out, 'mode', '')
3594 or sys.version_info[0] < 3): # Python 2 lies about mode of sys.stderr
104aa738
PH
3595 byt = s.encode(encoding or preferredencoding(), 'ignore')
3596 out.write(byt)
3597 elif hasattr(out, 'buffer'):
3598 enc = encoding or getattr(out, 'encoding', None) or preferredencoding()
3599 byt = s.encode(enc, 'ignore')
3600 out.buffer.write(byt)
3601 else:
8bf48f23 3602 out.write(s)
7459e3a2
PH
3603 out.flush()
3604
3605
48ea9cea
PH
3606def bytes_to_intlist(bs):
3607 if not bs:
3608 return []
3609 if isinstance(bs[0], int): # Python 3
3610 return list(bs)
3611 else:
3612 return [ord(c) for c in bs]
3613
c257baff 3614
cba892fa 3615def intlist_to_bytes(xs):
3616 if not xs:
3617 return b''
edaa23f8 3618 return compat_struct_pack('%dB' % len(xs), *xs)
c38b1e77
PH
3619
3620
c1c9a79c
PH
3621# Cross-platform file locking
3622if sys.platform == 'win32':
3623 import ctypes.wintypes
3624 import msvcrt
3625
3626 class OVERLAPPED(ctypes.Structure):
3627 _fields_ = [
3628 ('Internal', ctypes.wintypes.LPVOID),
3629 ('InternalHigh', ctypes.wintypes.LPVOID),
3630 ('Offset', ctypes.wintypes.DWORD),
3631 ('OffsetHigh', ctypes.wintypes.DWORD),
3632 ('hEvent', ctypes.wintypes.HANDLE),
3633 ]
3634
3635 kernel32 = ctypes.windll.kernel32
3636 LockFileEx = kernel32.LockFileEx
3637 LockFileEx.argtypes = [
3638 ctypes.wintypes.HANDLE, # hFile
3639 ctypes.wintypes.DWORD, # dwFlags
3640 ctypes.wintypes.DWORD, # dwReserved
3641 ctypes.wintypes.DWORD, # nNumberOfBytesToLockLow
3642 ctypes.wintypes.DWORD, # nNumberOfBytesToLockHigh
3643 ctypes.POINTER(OVERLAPPED) # Overlapped
3644 ]
3645 LockFileEx.restype = ctypes.wintypes.BOOL
3646 UnlockFileEx = kernel32.UnlockFileEx
3647 UnlockFileEx.argtypes = [
3648 ctypes.wintypes.HANDLE, # hFile
3649 ctypes.wintypes.DWORD, # dwReserved
3650 ctypes.wintypes.DWORD, # nNumberOfBytesToLockLow
3651 ctypes.wintypes.DWORD, # nNumberOfBytesToLockHigh
3652 ctypes.POINTER(OVERLAPPED) # Overlapped
3653 ]
3654 UnlockFileEx.restype = ctypes.wintypes.BOOL
3655 whole_low = 0xffffffff
3656 whole_high = 0x7fffffff
3657
3658 def _lock_file(f, exclusive):
3659 overlapped = OVERLAPPED()
3660 overlapped.Offset = 0
3661 overlapped.OffsetHigh = 0
3662 overlapped.hEvent = 0
3663 f._lock_file_overlapped_p = ctypes.pointer(overlapped)
3664 handle = msvcrt.get_osfhandle(f.fileno())
3665 if not LockFileEx(handle, 0x2 if exclusive else 0x0, 0,
3666 whole_low, whole_high, f._lock_file_overlapped_p):
3667 raise OSError('Locking file failed: %r' % ctypes.FormatError())
3668
3669 def _unlock_file(f):
3670 assert f._lock_file_overlapped_p
3671 handle = msvcrt.get_osfhandle(f.fileno())
3672 if not UnlockFileEx(handle, 0,
3673 whole_low, whole_high, f._lock_file_overlapped_p):
3674 raise OSError('Unlocking file failed: %r' % ctypes.FormatError())
3675
3676else:
399a76e6
YCH
3677 # Some platforms, such as Jython, are missing fcntl
3678 try:
3679 import fcntl
c1c9a79c 3680
399a76e6
YCH
3681 def _lock_file(f, exclusive):
3682 fcntl.flock(f, fcntl.LOCK_EX if exclusive else fcntl.LOCK_SH)
c1c9a79c 3683
399a76e6
YCH
3684 def _unlock_file(f):
3685 fcntl.flock(f, fcntl.LOCK_UN)
3686 except ImportError:
3687 UNSUPPORTED_MSG = 'file locking is not supported on this platform'
3688
3689 def _lock_file(f, exclusive):
3690 raise IOError(UNSUPPORTED_MSG)
3691
3692 def _unlock_file(f):
3693 raise IOError(UNSUPPORTED_MSG)
c1c9a79c
PH
3694
3695
3696class locked_file(object):
3697 def __init__(self, filename, mode, encoding=None):
3698 assert mode in ['r', 'a', 'w']
3699 self.f = io.open(filename, mode, encoding=encoding)
3700 self.mode = mode
3701
3702 def __enter__(self):
3703 exclusive = self.mode != 'r'
3704 try:
3705 _lock_file(self.f, exclusive)
3706 except IOError:
3707 self.f.close()
3708 raise
3709 return self
3710
3711 def __exit__(self, etype, value, traceback):
3712 try:
3713 _unlock_file(self.f)
3714 finally:
3715 self.f.close()
3716
3717 def __iter__(self):
3718 return iter(self.f)
3719
3720 def write(self, *args):
3721 return self.f.write(*args)
3722
3723 def read(self, *args):
3724 return self.f.read(*args)
4eb7f1d1
JMF
3725
3726
4644ac55
S
3727def get_filesystem_encoding():
3728 encoding = sys.getfilesystemencoding()
3729 return encoding if encoding is not None else 'utf-8'
3730
3731
4eb7f1d1 3732def shell_quote(args):
a6a173c2 3733 quoted_args = []
4644ac55 3734 encoding = get_filesystem_encoding()
a6a173c2
JMF
3735 for a in args:
3736 if isinstance(a, bytes):
3737 # We may get a filename encoded with 'encodeFilename'
3738 a = a.decode(encoding)
aefce8e6 3739 quoted_args.append(compat_shlex_quote(a))
28e614de 3740 return ' '.join(quoted_args)
9d4660ca
PH
3741
3742
3743def smuggle_url(url, data):
3744 """ Pass additional data in a URL for internal use. """
3745
81953d1a
RA
3746 url, idata = unsmuggle_url(url, {})
3747 data.update(idata)
15707c7e 3748 sdata = compat_urllib_parse_urlencode(
28e614de
PH
3749 {'__youtubedl_smuggle': json.dumps(data)})
3750 return url + '#' + sdata
9d4660ca
PH
3751
3752
79f82953 3753def unsmuggle_url(smug_url, default=None):
83e865a3 3754 if '#__youtubedl_smuggle' not in smug_url:
79f82953 3755 return smug_url, default
28e614de
PH
3756 url, _, sdata = smug_url.rpartition('#')
3757 jsond = compat_parse_qs(sdata)['__youtubedl_smuggle'][0]
9d4660ca
PH
3758 data = json.loads(jsond)
3759 return url, data
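# Rough usage sketch: smuggle_url()/unsmuggle_url() round-trip JSON-serializable
# data through the URL fragment:
#   >>> url = smuggle_url('https://example.com/watch?v=1', {'referer': 'x'})
#   >>> unsmuggle_url(url)
#   ('https://example.com/watch?v=1', {'referer': 'x'})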
02dbf93f
PH
3760
3761
e0fd9573 3762def format_decimal_suffix(num, fmt='%d%s', *, factor=1000):
3763 """ Formats numbers with decimal sufixes like K, M, etc """
3764 num, factor = float_or_none(num), float(factor)
3765 if num is None:
3766 return None
3767 exponent = 0 if num == 0 else int(math.log(num, factor))
abbeeebc 3768 suffix = ['', *'kMGTPEZY'][exponent]
3769 if factor == 1024:
3770 suffix = {'k': 'Ki', '': ''}.get(suffix, f'{suffix}i')
e0fd9573 3771 converted = num / (factor ** exponent)
abbeeebc 3772 return fmt % (converted, suffix)
e0fd9573 3773
3774
02dbf93f 3775def format_bytes(bytes):
f02d24d8 3776 return format_decimal_suffix(bytes, '%.2f%sB', factor=1024) or 'N/A'
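# Rough usage sketch: the suffix is chosen from the magnitude; factor=1024
# switches to binary (KiB/MiB/...) prefixes:
#   >>> format_decimal_suffix(2500)
#   '2k'
#   >>> format_bytes(1536)
#   '1.50KiB'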
f53c966a 3777
1c088fa8 3778
fb47597b
S
3779def lookup_unit_table(unit_table, s):
3780 units_re = '|'.join(re.escape(u) for u in unit_table)
3781 m = re.match(
782b1b5b 3782 r'(?P<num>[0-9]+(?:[,.][0-9]*)?)\s*(?P<unit>%s)\b' % units_re, s)
fb47597b
S
3783 if not m:
3784 return None
3785 num_str = m.group('num').replace(',', '.')
3786 mult = unit_table[m.group('unit')]
3787 return int(float(num_str) * mult)
3788
3789
be64b5b0
PH
3790def parse_filesize(s):
3791 if s is None:
3792 return None
3793
dfb1b146 3794 # The lower-case forms are of course incorrect and unofficial,
be64b5b0
PH
3795 # but we support those too
3796 _UNIT_TABLE = {
3797 'B': 1,
3798 'b': 1,
70852b47 3799 'bytes': 1,
be64b5b0
PH
3800 'KiB': 1024,
3801 'KB': 1000,
3802 'kB': 1024,
3803 'Kb': 1000,
13585d76 3804 'kb': 1000,
70852b47
YCH
3805 'kilobytes': 1000,
3806 'kibibytes': 1024,
be64b5b0
PH
3807 'MiB': 1024 ** 2,
3808 'MB': 1000 ** 2,
3809 'mB': 1024 ** 2,
3810 'Mb': 1000 ** 2,
13585d76 3811 'mb': 1000 ** 2,
70852b47
YCH
3812 'megabytes': 1000 ** 2,
3813 'mebibytes': 1024 ** 2,
be64b5b0
PH
3814 'GiB': 1024 ** 3,
3815 'GB': 1000 ** 3,
3816 'gB': 1024 ** 3,
3817 'Gb': 1000 ** 3,
13585d76 3818 'gb': 1000 ** 3,
70852b47
YCH
3819 'gigabytes': 1000 ** 3,
3820 'gibibytes': 1024 ** 3,
be64b5b0
PH
3821 'TiB': 1024 ** 4,
3822 'TB': 1000 ** 4,
3823 'tB': 1024 ** 4,
3824 'Tb': 1000 ** 4,
13585d76 3825 'tb': 1000 ** 4,
70852b47
YCH
3826 'terabytes': 1000 ** 4,
3827 'tebibytes': 1024 ** 4,
be64b5b0
PH
3828 'PiB': 1024 ** 5,
3829 'PB': 1000 ** 5,
3830 'pB': 1024 ** 5,
3831 'Pb': 1000 ** 5,
13585d76 3832 'pb': 1000 ** 5,
70852b47
YCH
3833 'petabytes': 1000 ** 5,
3834 'pebibytes': 1024 ** 5,
be64b5b0
PH
3835 'EiB': 1024 ** 6,
3836 'EB': 1000 ** 6,
3837 'eB': 1024 ** 6,
3838 'Eb': 1000 ** 6,
13585d76 3839 'eb': 1000 ** 6,
70852b47
YCH
3840 'exabytes': 1000 ** 6,
3841 'exbibytes': 1024 ** 6,
be64b5b0
PH
3842 'ZiB': 1024 ** 7,
3843 'ZB': 1000 ** 7,
3844 'zB': 1024 ** 7,
3845 'Zb': 1000 ** 7,
13585d76 3846 'zb': 1000 ** 7,
70852b47
YCH
3847 'zettabytes': 1000 ** 7,
3848 'zebibytes': 1024 ** 7,
be64b5b0
PH
3849 'YiB': 1024 ** 8,
3850 'YB': 1000 ** 8,
3851 'yB': 1024 ** 8,
3852 'Yb': 1000 ** 8,
13585d76 3853 'yb': 1000 ** 8,
70852b47
YCH
3854 'yottabytes': 1000 ** 8,
3855 'yobibytes': 1024 ** 8,
be64b5b0
PH
3856 }
3857
fb47597b
S
3858 return lookup_unit_table(_UNIT_TABLE, s)
3859
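# A short sketch of how the unit table above is applied (hypothetical helper, not
# part of the original module); decimal commas are accepted as separators:
def _example_parse_filesize():
    assert parse_filesize('5 MiB') == 5 * 1024 ** 2
    assert parse_filesize('1,5 GB') == 1500000000   # '1,5' is normalized to '1.5'
    assert parse_filesize('garbage') is None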
3860
3861def parse_count(s):
3862 if s is None:
be64b5b0
PH
3863 return None
3864
352d5da8 3865 s = re.sub(r'^[^\d]+\s', '', s).strip()
fb47597b
S
3866
3867 if re.match(r'^[\d,.]+$', s):
3868 return str_to_int(s)
3869
3870 _UNIT_TABLE = {
3871 'k': 1000,
3872 'K': 1000,
3873 'm': 1000 ** 2,
3874 'M': 1000 ** 2,
3875 'kk': 1000 ** 2,
3876 'KK': 1000 ** 2,
352d5da8 3877 'b': 1000 ** 3,
3878 'B': 1000 ** 3,
fb47597b 3879 }
be64b5b0 3880
352d5da8 3881 ret = lookup_unit_table(_UNIT_TABLE, s)
3882 if ret is not None:
3883 return ret
3884
3885 mobj = re.match(r'([\d,.]+)(?:$|\s)', s)
3886 if mobj:
3887 return str_to_int(mobj.group(1))
be64b5b0 3888
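# Illustrative calls for parse_count (hypothetical helper, not part of the
# original module); leading words and thousands separators are tolerated:
def _example_parse_count():
    assert parse_count('1.5M') == 1500000
    assert parse_count('1,234 views') == 1234
    assert parse_count(None) is None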
2f7ae819 3889
b871d7e9
S
3890def parse_resolution(s):
3891 if s is None:
3892 return {}
3893
17ec8bcf 3894 mobj = re.search(r'(?<![a-zA-Z0-9])(?P<w>\d+)\s*[xX×,]\s*(?P<h>\d+)(?![a-zA-Z0-9])', s)
b871d7e9
S
3895 if mobj:
3896 return {
3897 'width': int(mobj.group('w')),
3898 'height': int(mobj.group('h')),
3899 }
3900
17ec8bcf 3901 mobj = re.search(r'(?<![a-zA-Z0-9])(\d+)[pPiI](?![a-zA-Z0-9])', s)
b871d7e9
S
3902 if mobj:
3903 return {'height': int(mobj.group(1))}
3904
3905 mobj = re.search(r'\b([48])[kK]\b', s)
3906 if mobj:
3907 return {'height': int(mobj.group(1)) * 540}
3908
3909 return {}
3910
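# A minimal sketch of the three patterns parse_resolution tries, in order
# (hypothetical helper, not part of the original module):
def _example_parse_resolution():
    assert parse_resolution('1920x1080') == {'width': 1920, 'height': 1080}  # WxH
    assert parse_resolution('720p') == {'height': 720}                       # <n>p / <n>i
    assert parse_resolution('4K') == {'height': 2160}                        # 4k/8k shorthand
    assert parse_resolution('no resolution here') == {}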
3911
0dc41787
S
3912def parse_bitrate(s):
3913 if not isinstance(s, compat_str):
3914 return
3915 mobj = re.search(r'\b(\d+)\s*kbps', s)
3916 if mobj:
3917 return int(mobj.group(1))
3918
3919
a942d6cb 3920def month_by_name(name, lang='en'):
caefb1de
PH
3921 """ Return the number of a month by (locale-independently) English name """
3922
f6717dec 3923 month_names = MONTH_NAMES.get(lang, MONTH_NAMES['en'])
a942d6cb 3924
caefb1de 3925 try:
f6717dec 3926 return month_names.index(name) + 1
7105440c
YCH
3927 except ValueError:
3928 return None
3929
3930
3931def month_by_abbreviation(abbrev):
3932 """ Return the number of a month by (locale-independently) English
3933 abbreviations """
3934
3935 try:
3936 return [s[:3] for s in ENGLISH_MONTH_NAMES].index(abbrev) + 1
caefb1de
PH
3937 except ValueError:
3938 return None
18258362
JMF
3939
3940
5aafe895 3941def fix_xml_ampersands(xml_str):
18258362 3942 """Replace all the '&' by '&amp;' in XML"""
5aafe895
PH
3943 return re.sub(
3944 r'&(?!amp;|lt;|gt;|apos;|quot;|#x[0-9a-fA-F]{,4};|#[0-9]{,4};)',
28e614de 3945 '&amp;',
5aafe895 3946 xml_str)
e3946f98
PH
3947
3948
3949def setproctitle(title):
8bf48f23 3950 assert isinstance(title, compat_str)
c1c05c67
YCH
3951
3952 # ctypes in Jython is not complete
3953 # http://bugs.jython.org/issue2148
3954 if sys.platform.startswith('java'):
3955 return
3956
e3946f98 3957 try:
611c1dd9 3958 libc = ctypes.cdll.LoadLibrary('libc.so.6')
e3946f98
PH
3959 except OSError:
3960 return
2f49bcd6
RC
3961 except TypeError:
3962 # LoadLibrary in Windows Python 2.7.13 only expects
3963 # a bytestring, but since unicode_literals turns
3964 # every string into a unicode string, it fails.
3965 return
6eefe533
PH
3966 title_bytes = title.encode('utf-8')
3967 buf = ctypes.create_string_buffer(len(title_bytes))
3968 buf.value = title_bytes
e3946f98 3969 try:
6eefe533 3970 libc.prctl(15, buf, 0, 0, 0)
e3946f98
PH
3971 except AttributeError:
3972 return # Strange libc, just skip this
d7dda168
PH
3973
3974
3975def remove_start(s, start):
46bc9b7d 3976 return s[len(start):] if s is not None and s.startswith(start) else s
29eb5174
PH
3977
3978
2b9faf55 3979def remove_end(s, end):
46bc9b7d 3980 return s[:-len(end)] if s is not None and s.endswith(end) else s
2b9faf55
PH
3981
3982
31b2051e
S
3983def remove_quotes(s):
3984 if s is None or len(s) < 2:
3985 return s
3986 for quote in ('"', "'", ):
3987 if s[0] == quote and s[-1] == quote:
3988 return s[1:-1]
3989 return s
3990
3991
b6e0c7d2
U
3992def get_domain(url):
3993 domain = re.match(r'(?:https?:\/\/)?(?:www\.)?(?P<domain>[^\n\/]+\.[^\n\/]+)(?:\/(.*))?', url)
3994 return domain.group('domain') if domain else None
3995
3996
29eb5174 3997def url_basename(url):
9b8aaeed 3998 path = compat_urlparse.urlparse(url).path
28e614de 3999 return path.strip('/').split('/')[-1]
aa94a6d3
PH
4000
4001
02dc0a36
S
4002def base_url(url):
4003 return re.match(r'https?://[^?#&]+/', url).group()
4004
4005
e34c3361 4006def urljoin(base, path):
4b5de77b
S
4007 if isinstance(path, bytes):
4008 path = path.decode('utf-8')
e34c3361
S
4009 if not isinstance(path, compat_str) or not path:
4010 return None
fad4ceb5 4011 if re.match(r'^(?:[a-zA-Z][a-zA-Z0-9+-.]*:)?//', path):
e34c3361 4012 return path
4b5de77b
S
4013 if isinstance(base, bytes):
4014 base = base.decode('utf-8')
4015 if not isinstance(base, compat_str) or not re.match(
4016 r'^(?:https?:)?//', base):
e34c3361
S
4017 return None
4018 return compat_urlparse.urljoin(base, path)
4019
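# Illustrative behaviour of urljoin (hypothetical helper, not part of the original
# module): relative paths are resolved against the base, protocol-relative paths
# pass through unchanged, and anything without a usable base yields None:
def _example_urljoin():
    assert urljoin('https://example.com/a/', 'b/c') == 'https://example.com/a/b/c'
    assert urljoin('https://example.com/a/', '//cdn.example.com/x.mp4') == '//cdn.example.com/x.mp4'
    assert urljoin('not a url', '/x') is None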
4020
aa94a6d3
PH
4021class HEADRequest(compat_urllib_request.Request):
4022 def get_method(self):
611c1dd9 4023 return 'HEAD'
7217e148
PH
4024
4025
95cf60e8
S
4026class PUTRequest(compat_urllib_request.Request):
4027 def get_method(self):
4028 return 'PUT'
4029
4030
9732d77e 4031def int_or_none(v, scale=1, default=None, get_attr=None, invscale=1):
9e907ebd 4032 if get_attr and v is not None:
4033 v = getattr(v, get_attr, None)
1812afb7
S
4034 try:
4035 return int(v) * invscale // scale
31c49255 4036 except (ValueError, TypeError, OverflowError):
af98f8ff 4037 return default
9732d77e 4038
9572013d 4039
40a90862
JMF
4040def str_or_none(v, default=None):
4041 return default if v is None else compat_str(v)
4042
9732d77e
PH
4043
4044def str_to_int(int_str):
48d4681e 4045 """ A more relaxed version of int_or_none """
42db58ec 4046 if isinstance(int_str, compat_integer_types):
348c6bf1 4047 return int_str
42db58ec
S
4048 elif isinstance(int_str, compat_str):
4049 int_str = re.sub(r'[,\.\+]', '', int_str)
4050 return int_or_none(int_str)
608d11f5
PH
4051
4052
9732d77e 4053def float_or_none(v, scale=1, invscale=1, default=None):
caf80631
S
4054 if v is None:
4055 return default
4056 try:
4057 return float(v) * invscale / scale
5e1271c5 4058 except (ValueError, TypeError):
caf80631 4059 return default
43f775e4
PH
4060
4061
c7e327c4
S
4062def bool_or_none(v, default=None):
4063 return v if isinstance(v, bool) else default
4064
4065
53cd37ba
S
4066def strip_or_none(v, default=None):
4067 return v.strip() if isinstance(v, compat_str) else default
b72b4431
S
4068
4069
af03000a
S
4070def url_or_none(url):
4071 if not url or not isinstance(url, compat_str):
4072 return None
4073 url = url.strip()
29f7c58a 4074 return url if re.match(r'^(?:(?:https?|rt(?:m(?:pt?[es]?|fp)|sp[su]?)|mms|ftps?):)?//', url) else None
af03000a
S
4075
4076
e29663c6 4077def strftime_or_none(timestamp, date_format, default=None):
4078 datetime_object = None
4079 try:
4080 if isinstance(timestamp, compat_numeric_types): # unix timestamp
4081 datetime_object = datetime.datetime.utcfromtimestamp(timestamp)
4082 elif isinstance(timestamp, compat_str): # assume YYYYMMDD
4083 datetime_object = datetime.datetime.strptime(timestamp, '%Y%m%d')
4084 return datetime_object.strftime(date_format)
4085 except (ValueError, TypeError, AttributeError):
4086 return default
4087
4088
608d11f5 4089def parse_duration(s):
8f9312c3 4090 if not isinstance(s, compat_basestring):
608d11f5 4091 return None
ca7b3246 4092 s = s.strip()
38d79fd1 4093 if not s:
4094 return None
ca7b3246 4095
acaff495 4096 days, hours, mins, secs, ms = [None] * 5
8bd1c00b 4097 m = re.match(r'''(?x)
4098 (?P<before_secs>
4099 (?:(?:(?P<days>[0-9]+):)?(?P<hours>[0-9]+):)?(?P<mins>[0-9]+):)?
4100 (?P<secs>(?(before_secs)[0-9]{1,2}|[0-9]+))
4101 (?P<ms>[.:][0-9]+)?Z?$
4102 ''', s)
acaff495 4103 if m:
8bd1c00b 4104 days, hours, mins, secs, ms = m.group('days', 'hours', 'mins', 'secs', 'ms')
acaff495 4105 else:
4106 m = re.match(
056653bb
S
4107 r'''(?ix)(?:P?
4108 (?:
4109 [0-9]+\s*y(?:ears?)?\s*
4110 )?
4111 (?:
4112 [0-9]+\s*m(?:onths?)?\s*
4113 )?
4114 (?:
4115 [0-9]+\s*w(?:eeks?)?\s*
4116 )?
8f4b58d7 4117 (?:
acaff495 4118 (?P<days>[0-9]+)\s*d(?:ays?)?\s*
8f4b58d7 4119 )?
056653bb 4120 T)?
acaff495 4121 (?:
4122 (?P<hours>[0-9]+)\s*h(?:ours?)?\s*
4123 )?
4124 (?:
4125 (?P<mins>[0-9]+)\s*m(?:in(?:ute)?s?)?\s*
4126 )?
4127 (?:
4128 (?P<secs>[0-9]+)(?P<ms>\.[0-9]+)?\s*s(?:ec(?:ond)?s?)?\s*
15846398 4129 )?Z?$''', s)
acaff495 4130 if m:
4131 days, hours, mins, secs, ms = m.groups()
4132 else:
15846398 4133 m = re.match(r'(?i)(?:(?P<hours>[0-9.]+)\s*(?:hours?)|(?P<mins>[0-9.]+)\s*(?:mins?\.?|minutes?)\s*)Z?$', s)
acaff495 4134 if m:
4135 hours, mins = m.groups()
4136 else:
4137 return None
4138
4139 duration = 0
4140 if secs:
4141 duration += float(secs)
4142 if mins:
4143 duration += float(mins) * 60
4144 if hours:
4145 duration += float(hours) * 60 * 60
4146 if days:
4147 duration += float(days) * 24 * 60 * 60
4148 if ms:
8bd1c00b 4149 duration += float(ms.replace(':', '.'))
acaff495 4150 return duration
91d7d0b3
JMF
4151
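# A few representative inputs accepted by parse_duration (hypothetical helper,
# not part of the original module): colon-separated, ISO 8601-like and free-form:
def _example_parse_duration():
    assert parse_duration('1:23:45') == 5025
    assert parse_duration('PT1M30S') == 90
    assert parse_duration('2h 30m') == 9000
    assert parse_duration('soon') is None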
4152
e65e4c88 4153def prepend_extension(filename, ext, expected_real_ext=None):
5f6a1245 4154 name, real_ext = os.path.splitext(filename)
e65e4c88
S
4155 return (
4156 '{0}.{1}{2}'.format(name, ext, real_ext)
4157 if not expected_real_ext or real_ext[1:] == expected_real_ext
4158 else '{0}.{1}'.format(filename, ext))
d70ad093
PH
4159
4160
b3ed15b7
S
4161def replace_extension(filename, ext, expected_real_ext=None):
4162 name, real_ext = os.path.splitext(filename)
4163 return '{0}.{1}'.format(
4164 name if not expected_real_ext or real_ext[1:] == expected_real_ext else filename,
4165 ext)
4166
4167
d70ad093
PH
4168def check_executable(exe, args=[]):
4169 """ Checks if the given binary is installed somewhere in PATH, and returns its name.
4170 args can be a list of arguments for a short output (like -version) """
4171 try:
d3c93ec2 4172 Popen([exe] + args, stdout=subprocess.PIPE, stderr=subprocess.PIPE).communicate_or_kill()
d70ad093
PH
4173 except OSError:
4174 return False
4175 return exe
b7ab0590
PH
4176
4177
9af98e17 4178def _get_exe_version_output(exe, args):
95807118 4179 try:
b64d04c1 4180 # STDIN should be redirected too. On UNIX-like systems, ffmpeg triggers
7a5c1cfe 4181 # SIGTTOU if yt-dlp is run in the background.
067aa17e 4182 # See https://github.com/ytdl-org/youtube-dl/issues/955#issuecomment-209789656
d3c93ec2 4183 out, _ = Popen(
4184 [encodeArgument(exe)] + args, stdin=subprocess.PIPE,
4185 stdout=subprocess.PIPE, stderr=subprocess.STDOUT).communicate_or_kill()
95807118
PH
4186 except OSError:
4187 return False
cae97f65
PH
4188 if isinstance(out, bytes): # Python 2.x
4189 out = out.decode('ascii', 'ignore')
9af98e17 4190 return out
cae97f65
PH
4191
4192
4193def detect_exe_version(output, version_re=None, unrecognized='present'):
4194 assert isinstance(output, compat_str)
4195 if version_re is None:
4196 version_re = r'version\s+([-0-9._a-zA-Z]+)'
4197 m = re.search(version_re, output)
95807118
PH
4198 if m:
4199 return m.group(1)
4200 else:
4201 return unrecognized
4202
4203
9af98e17 4204def get_exe_version(exe, args=['--version'],
4205 version_re=None, unrecognized='present'):
4206 """ Returns the version of the specified executable,
4207 or False if the executable is not present """
4208 out = _get_exe_version_output(exe, args)
4209 return detect_exe_version(out, version_re, unrecognized) if out else False
4210
4211
cb89cfc1 4212class LazyList(collections.abc.Sequence):
483336e7 4213 ''' Lazy immutable list from an iterable
4214 Note that slices of a LazyList are lists and not LazyList'''
4215
8e5fecc8 4216 class IndexError(IndexError):
4217 pass
4218
282f5709 4219 def __init__(self, iterable, *, reverse=False, _cache=None):
483336e7 4220 self.__iterable = iter(iterable)
282f5709 4221 self.__cache = [] if _cache is None else _cache
4222 self.__reversed = reverse
483336e7 4223
4224 def __iter__(self):
28419ca2 4225 if self.__reversed:
4226 # We need to consume the entire iterable to iterate in reverse
981052c9 4227 yield from self.exhaust()
28419ca2 4228 return
4229 yield from self.__cache
483336e7 4230 for item in self.__iterable:
4231 self.__cache.append(item)
4232 yield item
4233
981052c9 4234 def __exhaust(self):
483336e7 4235 self.__cache.extend(self.__iterable)
9f1a1c36 4236 # Discard the emptied iterable to make it pickle-able
4237 self.__iterable = []
28419ca2 4238 return self.__cache
4239
981052c9 4240 def exhaust(self):
4241 ''' Evaluate the entire iterable '''
4242 return self.__exhaust()[::-1 if self.__reversed else 1]
4243
28419ca2 4244 @staticmethod
981052c9 4245 def __reverse_index(x):
e0f2b4b4 4246 return None if x is None else -(x + 1)
483336e7 4247
4248 def __getitem__(self, idx):
4249 if isinstance(idx, slice):
28419ca2 4250 if self.__reversed:
e0f2b4b4 4251 idx = slice(self.__reverse_index(idx.start), self.__reverse_index(idx.stop), -(idx.step or 1))
4252 start, stop, step = idx.start, idx.stop, idx.step or 1
483336e7 4253 elif isinstance(idx, int):
28419ca2 4254 if self.__reversed:
981052c9 4255 idx = self.__reverse_index(idx)
e0f2b4b4 4256 start, stop, step = idx, idx, 0
483336e7 4257 else:
4258 raise TypeError('indices must be integers or slices')
e0f2b4b4 4259 if ((start or 0) < 0 or (stop or 0) < 0
4260 or (start is None and step < 0)
4261 or (stop is None and step > 0)):
483336e7 4262 # We need to consume the entire iterable to be able to slice from the end
4263 # Obviously, never use this with infinite iterables
8e5fecc8 4264 self.__exhaust()
4265 try:
4266 return self.__cache[idx]
4267 except IndexError as e:
4268 raise self.IndexError(e) from e
e0f2b4b4 4269 n = max(start or 0, stop or 0) - len(self.__cache) + 1
28419ca2 4270 if n > 0:
4271 self.__cache.extend(itertools.islice(self.__iterable, n))
8e5fecc8 4272 try:
4273 return self.__cache[idx]
4274 except IndexError as e:
4275 raise self.IndexError(e) from e
483336e7 4276
4277 def __bool__(self):
4278 try:
28419ca2 4279 self[-1] if self.__reversed else self[0]
8e5fecc8 4280 except self.IndexError:
483336e7 4281 return False
4282 return True
4283
4284 def __len__(self):
8e5fecc8 4285 self.__exhaust()
483336e7 4286 return len(self.__cache)
4287
282f5709 4288 def __reversed__(self):
4289 return type(self)(self.__iterable, reverse=not self.__reversed, _cache=self.__cache)
4290
4291 def __copy__(self):
4292 return type(self)(self.__iterable, reverse=self.__reversed, _cache=self.__cache)
4293
28419ca2 4294 def __repr__(self):
4295 # repr and str should mimic a list. So we exhaust the iterable
4296 return repr(self.exhaust())
4297
4298 def __str__(self):
4299 return repr(self.exhaust())
4300
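# A small sketch of LazyList semantics (hypothetical helper, not part of the
# original module): items are pulled from the iterable only as far as needed,
# and slicing returns a plain list rather than another LazyList:
def _example_lazy_list():
    lazy = LazyList(itertools.count())      # an infinite iterator is fine as long as it is never exhausted
    assert lazy[:5] == [0, 1, 2, 3, 4]
    assert lazy[10] == 10                   # only the first 11 items have been consumed so far
    assert bool(lazy) is True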
483336e7 4301
7be9ccff 4302class PagedList:
c07a39ae 4303
4304 class IndexError(IndexError):
4305 pass
4306
dd26ced1
PH
4307 def __len__(self):
4308 # This is only useful for tests
4309 return len(self.getslice())
4310
7be9ccff 4311 def __init__(self, pagefunc, pagesize, use_cache=True):
4312 self._pagefunc = pagefunc
4313 self._pagesize = pagesize
4314 self._use_cache = use_cache
4315 self._cache = {}
4316
4317 def getpage(self, pagenum):
d8cf8d97 4318 page_results = self._cache.get(pagenum)
4319 if page_results is None:
4320 page_results = list(self._pagefunc(pagenum))
7be9ccff 4321 if self._use_cache:
4322 self._cache[pagenum] = page_results
4323 return page_results
4324
4325 def getslice(self, start=0, end=None):
4326 return list(self._getslice(start, end))
4327
4328 def _getslice(self, start, end):
55575225 4329 raise NotImplementedError('This method must be implemented by subclasses')
4330
4331 def __getitem__(self, idx):
7be9ccff 4332 # NOTE: cache must be enabled if this is used
55575225 4333 if not isinstance(idx, int) or idx < 0:
4334 raise TypeError('indices must be non-negative integers')
4335 entries = self.getslice(idx, idx + 1)
d8cf8d97 4336 if not entries:
c07a39ae 4337 raise self.IndexError()
d8cf8d97 4338 return entries[0]
55575225 4339
9c44d242
PH
4340
4341class OnDemandPagedList(PagedList):
7be9ccff 4342 def _getslice(self, start, end):
b7ab0590
PH
4343 for pagenum in itertools.count(start // self._pagesize):
4344 firstid = pagenum * self._pagesize
4345 nextfirstid = pagenum * self._pagesize + self._pagesize
4346 if start >= nextfirstid:
4347 continue
4348
b7ab0590
PH
4349 startv = (
4350 start % self._pagesize
4351 if firstid <= start < nextfirstid
4352 else 0)
b7ab0590
PH
4353 endv = (
4354 ((end - 1) % self._pagesize) + 1
4355 if (end is not None and firstid <= end <= nextfirstid)
4356 else None)
4357
7be9ccff 4358 page_results = self.getpage(pagenum)
b7ab0590
PH
4359 if startv != 0 or endv is not None:
4360 page_results = page_results[startv:endv]
7be9ccff 4361 yield from page_results
b7ab0590
PH
4362
 4363 # A little optimization - if the current page is not "full", i.e. does
 4364 # not contain page_size videos, then we can assume that this page
 4365 # is the last one - there are no more ids on further pages -
 4366 # i.e. there is no need to query again.
4367 if len(page_results) + startv < self._pagesize:
4368 break
4369
4370 # If we got the whole page, but the next page is not interesting,
4371 # break out early as well
4372 if end == nextfirstid:
4373 break
81c2f20b
PH
4374
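# Sketch of OnDemandPagedList with a made-up page function that yields three
# items per page (hypothetical helper, not part of the original module); pages
# are fetched lazily and cached, so only pages 0-2 are requested here:
def _example_on_demand_paged_list():
    pages = OnDemandPagedList(lambda n: list(range(n * 3, (n + 1) * 3)), 3)
    assert pages.getslice(2, 7) == [2, 3, 4, 5, 6]
    assert pages[4] == 4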
4375
9c44d242
PH
4376class InAdvancePagedList(PagedList):
4377 def __init__(self, pagefunc, pagecount, pagesize):
9c44d242 4378 self._pagecount = pagecount
7be9ccff 4379 PagedList.__init__(self, pagefunc, pagesize, True)
9c44d242 4380
7be9ccff 4381 def _getslice(self, start, end):
9c44d242
PH
4382 start_page = start // self._pagesize
4383 end_page = (
4384 self._pagecount if end is None else (end // self._pagesize + 1))
4385 skip_elems = start - start_page * self._pagesize
4386 only_more = None if end is None else end - start
4387 for pagenum in range(start_page, end_page):
7be9ccff 4388 page_results = self.getpage(pagenum)
9c44d242 4389 if skip_elems:
7be9ccff 4390 page_results = page_results[skip_elems:]
9c44d242
PH
4391 skip_elems = None
4392 if only_more is not None:
7be9ccff 4393 if len(page_results) < only_more:
4394 only_more -= len(page_results)
9c44d242 4395 else:
7be9ccff 4396 yield from page_results[:only_more]
9c44d242 4397 break
7be9ccff 4398 yield from page_results
9c44d242
PH
4399
4400
81c2f20b 4401def uppercase_escape(s):
676eb3f2 4402 unicode_escape = codecs.getdecoder('unicode_escape')
81c2f20b 4403 return re.sub(
a612753d 4404 r'\\U[0-9a-fA-F]{8}',
676eb3f2
PH
4405 lambda m: unicode_escape(m.group(0))[0],
4406 s)
0fe2ff78
YCH
4407
4408
4409def lowercase_escape(s):
4410 unicode_escape = codecs.getdecoder('unicode_escape')
4411 return re.sub(
4412 r'\\u[0-9a-fA-F]{4}',
4413 lambda m: unicode_escape(m.group(0))[0],
4414 s)
b53466e1 4415
d05cfe06
S
4416
4417def escape_rfc3986(s):
4418 """Escape non-ASCII characters as suggested by RFC 3986"""
8f9312c3 4419 if sys.version_info < (3, 0) and isinstance(s, compat_str):
d05cfe06 4420 s = s.encode('utf-8')
ecc0c5ee 4421 return compat_urllib_parse.quote(s, b"%/;:@&=+$,!~*'()?#[]")
d05cfe06
S
4422
4423
4424def escape_url(url):
4425 """Escape URL as suggested by RFC 3986"""
4426 url_parsed = compat_urllib_parse_urlparse(url)
4427 return url_parsed._replace(
efbed08d 4428 netloc=url_parsed.netloc.encode('idna').decode('ascii'),
d05cfe06
S
4429 path=escape_rfc3986(url_parsed.path),
4430 params=escape_rfc3986(url_parsed.params),
4431 query=escape_rfc3986(url_parsed.query),
4432 fragment=escape_rfc3986(url_parsed.fragment)
4433 ).geturl()
4434
62e609ab 4435
4dfbf869 4436def parse_qs(url):
4437 return compat_parse_qs(compat_urllib_parse_urlparse(url).query)
4438
4439
62e609ab
PH
4440def read_batch_urls(batch_fd):
4441 def fixup(url):
4442 if not isinstance(url, compat_str):
4443 url = url.decode('utf-8', 'replace')
8c04f0be 4444 BOM_UTF8 = ('\xef\xbb\xbf', '\ufeff')
4445 for bom in BOM_UTF8:
4446 if url.startswith(bom):
4447 url = url[len(bom):]
4448 url = url.lstrip()
4449 if not url or url.startswith(('#', ';', ']')):
62e609ab 4450 return False
8c04f0be 4451 # "#" cannot be stripped out since it is part of the URI
 4452 # However, it can safely be stripped out if it follows whitespace
4453 return re.split(r'\s#', url, 1)[0].rstrip()
62e609ab
PH
4454
4455 with contextlib.closing(batch_fd) as fd:
4456 return [url for url in map(fixup, fd) if url]
b74fa8cd
JMF
4457
4458
4459def urlencode_postdata(*args, **kargs):
15707c7e 4460 return compat_urllib_parse_urlencode(*args, **kargs).encode('ascii')
bcf89ce6
PH
4461
4462
38f9ef31 4463def update_url_query(url, query):
cacd9966
YCH
4464 if not query:
4465 return url
38f9ef31 4466 parsed_url = compat_urlparse.urlparse(url)
4467 qs = compat_parse_qs(parsed_url.query)
4468 qs.update(query)
4469 return compat_urlparse.urlunparse(parsed_url._replace(
15707c7e 4470 query=compat_urllib_parse_urlencode(qs, True)))
16392824 4471
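# A minimal sketch of update_url_query (hypothetical helper, not part of the
# original module); the example URLs are made up:
def _example_update_url_query():
    assert update_url_query('https://example.com/path', {'q': 'test'}) == 'https://example.com/path?q=test'
    # Existing parameters are kept and merged with the new ones:
    assert update_url_query('https://example.com/path?a=1', {'b': '2'}) == 'https://example.com/path?a=1&b=2'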
8e60dc75 4472
ed0291d1
S
4473def update_Request(req, url=None, data=None, headers={}, query={}):
4474 req_headers = req.headers.copy()
4475 req_headers.update(headers)
4476 req_data = data or req.data
4477 req_url = update_url_query(url or req.get_full_url(), query)
95cf60e8
S
4478 req_get_method = req.get_method()
4479 if req_get_method == 'HEAD':
4480 req_type = HEADRequest
4481 elif req_get_method == 'PUT':
4482 req_type = PUTRequest
4483 else:
4484 req_type = compat_urllib_request.Request
ed0291d1
S
4485 new_req = req_type(
4486 req_url, data=req_data, headers=req_headers,
4487 origin_req_host=req.origin_req_host, unverifiable=req.unverifiable)
4488 if hasattr(req, 'timeout'):
4489 new_req.timeout = req.timeout
4490 return new_req
4491
4492
10c87c15 4493def _multipart_encode_impl(data, boundary):
0c265486
YCH
4494 content_type = 'multipart/form-data; boundary=%s' % boundary
4495
4496 out = b''
4497 for k, v in data.items():
4498 out += b'--' + boundary.encode('ascii') + b'\r\n'
4499 if isinstance(k, compat_str):
4500 k = k.encode('utf-8')
4501 if isinstance(v, compat_str):
4502 v = v.encode('utf-8')
4503 # RFC 2047 requires non-ASCII field names to be encoded, while RFC 7578
4504 # suggests sending UTF-8 directly. Firefox sends UTF-8, too
b2ad479d 4505 content = b'Content-Disposition: form-data; name="' + k + b'"\r\n\r\n' + v + b'\r\n'
0c265486
YCH
4506 if boundary.encode('ascii') in content:
4507 raise ValueError('Boundary overlaps with data')
4508 out += content
4509
4510 out += b'--' + boundary.encode('ascii') + b'--\r\n'
4511
4512 return out, content_type
4513
4514
4515def multipart_encode(data, boundary=None):
4516 '''
4517 Encode a dict to RFC 7578-compliant form-data
4518
4519 data:
4520 A dict where keys and values can be either Unicode or bytes-like
4521 objects.
4522 boundary:
 4523 If specified, a Unicode object to be used as the boundary. Otherwise
4524 a random boundary is generated.
4525
4526 Reference: https://tools.ietf.org/html/rfc7578
4527 '''
4528 has_specified_boundary = boundary is not None
4529
4530 while True:
4531 if boundary is None:
4532 boundary = '---------------' + str(random.randrange(0x0fffffff, 0xffffffff))
4533
4534 try:
10c87c15 4535 out, content_type = _multipart_encode_impl(data, boundary)
0c265486
YCH
4536 break
4537 except ValueError:
4538 if has_specified_boundary:
4539 raise
4540 boundary = None
4541
4542 return out, content_type
4543
4544
86296ad2 4545def dict_get(d, key_or_keys, default=None, skip_false_values=True):
cbecc9b9
S
4546 if isinstance(key_or_keys, (list, tuple)):
4547 for key in key_or_keys:
86296ad2
S
4548 if key not in d or d[key] is None or skip_false_values and not d[key]:
4549 continue
4550 return d[key]
cbecc9b9
S
4551 return default
4552 return d.get(key_or_keys, default)
4553
4554
329ca3be 4555def try_get(src, getter, expected_type=None):
6606817a 4556 for get in variadic(getter):
a32a9a7e
S
4557 try:
4558 v = get(src)
4559 except (AttributeError, KeyError, TypeError, IndexError):
4560 pass
4561 else:
4562 if expected_type is None or isinstance(v, expected_type):
4563 return v
329ca3be
S
4564
4565
6cc62232
S
4566def merge_dicts(*dicts):
4567 merged = {}
4568 for a_dict in dicts:
4569 for k, v in a_dict.items():
4570 if v is None:
4571 continue
3089bc74
S
4572 if (k not in merged
4573 or (isinstance(v, compat_str) and v
4574 and isinstance(merged[k], compat_str)
4575 and not merged[k])):
6cc62232
S
4576 merged[k] = v
4577 return merged
4578
4579
8e60dc75
S
4580def encode_compat_str(string, encoding=preferredencoding(), errors='strict'):
4581 return string if isinstance(string, compat_str) else compat_str(string, encoding, errors)
4582
16392824 4583
a1a530b0
PH
4584US_RATINGS = {
4585 'G': 0,
4586 'PG': 10,
4587 'PG-13': 13,
4588 'R': 16,
4589 'NC': 18,
4590}
fac55558
PH
4591
4592
a8795327 4593TV_PARENTAL_GUIDELINES = {
5a16c9d9
RA
4594 'TV-Y': 0,
4595 'TV-Y7': 7,
4596 'TV-G': 0,
4597 'TV-PG': 0,
4598 'TV-14': 14,
4599 'TV-MA': 17,
a8795327
S
4600}
4601
4602
146c80e2 4603def parse_age_limit(s):
a8795327
S
4604 if type(s) == int:
4605 return s if 0 <= s <= 21 else None
4606 if not isinstance(s, compat_basestring):
d838b1bd 4607 return None
146c80e2 4608 m = re.match(r'^(?P<age>\d{1,2})\+?$', s)
a8795327
S
4609 if m:
4610 return int(m.group('age'))
5c5fae6d 4611 s = s.upper()
a8795327
S
4612 if s in US_RATINGS:
4613 return US_RATINGS[s]
5a16c9d9 4614 m = re.match(r'^TV[_-]?(%s)$' % '|'.join(k[3:] for k in TV_PARENTAL_GUIDELINES), s)
b8361187 4615 if m:
5a16c9d9 4616 return TV_PARENTAL_GUIDELINES['TV-' + m.group(1)]
b8361187 4617 return None
146c80e2
S
4618
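# Representative inputs for parse_age_limit (hypothetical helper, not part of the
# original module), covering plain ages, MPAA ratings and TV Parental Guidelines:
def _example_parse_age_limit():
    assert parse_age_limit(18) == 18
    assert parse_age_limit('18+') == 18
    assert parse_age_limit('PG-13') == 13
    assert parse_age_limit('TV-MA') == 17
    assert parse_age_limit('unrated') is None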
4619
fac55558 4620def strip_jsonp(code):
609a61e3 4621 return re.sub(
5552c9eb 4622 r'''(?sx)^
e9c671d5 4623 (?:window\.)?(?P<func_name>[a-zA-Z0-9_.$]*)
5552c9eb
YCH
4624 (?:\s*&&\s*(?P=func_name))?
4625 \s*\(\s*(?P<callback_data>.*)\);?
4626 \s*?(?://[^\n]*)*$''',
4627 r'\g<callback_data>', code)
478c2c61
PH
4628
4629
5c610515 4630def js_to_json(code, vars={}):
4631 # vars is a dict of var, val pairs to substitute
c843e685 4632 COMMENT_RE = r'/\*(?:(?!\*/).)*?\*/|//[^\n]*\n'
4195096e
S
4633 SKIP_RE = r'\s*(?:{comment})?\s*'.format(comment=COMMENT_RE)
4634 INTEGER_TABLE = (
4635 (r'(?s)^(0[xX][0-9a-fA-F]+){skip}:?$'.format(skip=SKIP_RE), 16),
4636 (r'(?s)^(0+[0-7]+){skip}:?$'.format(skip=SKIP_RE), 8),
4637 )
4638
e05f6939 4639 def fix_kv(m):
e7b6d122
PH
4640 v = m.group(0)
4641 if v in ('true', 'false', 'null'):
4642 return v
421ddcb8
C
4643 elif v in ('undefined', 'void 0'):
4644 return 'null'
8bdd16b4 4645 elif v.startswith('/*') or v.startswith('//') or v.startswith('!') or v == ',':
bd1e4844 4646 return ""
4647
4648 if v[0] in ("'", '"'):
4649 v = re.sub(r'(?s)\\.|"', lambda m: {
e7b6d122 4650 '"': '\\"',
bd1e4844 4651 "\\'": "'",
4652 '\\\n': '',
4653 '\\x': '\\u00',
4654 }.get(m.group(0), m.group(0)), v[1:-1])
8bdd16b4 4655 else:
4656 for regex, base in INTEGER_TABLE:
4657 im = re.match(regex, v)
4658 if im:
4659 i = int(im.group(1), base)
4660 return '"%d":' % i if v.endswith(':') else '%d' % i
89ac4a19 4661
5c610515 4662 if v in vars:
4663 return vars[v]
4664
e7b6d122 4665 return '"%s"' % v
e05f6939 4666
bd1e4844 4667 return re.sub(r'''(?sx)
4668 "(?:[^"\\]*(?:\\\\|\\['"nurtbfx/\n]))*[^"\\]*"|
4669 '(?:[^'\\]*(?:\\\\|\\['"nurtbfx/\n]))*[^'\\]*'|
4195096e 4670 {comment}|,(?={skip}[\]}}])|
421ddcb8 4671 void\s0|(?:(?<![0-9])[eE]|[a-df-zA-DF-Z_$])[.a-zA-Z_$0-9]*|
4195096e 4672 \b(?:0[xX][0-9a-fA-F]+|0+[0-7]+)(?:{skip}:)?|
8bdd16b4 4673 [0-9]+(?={skip}:)|
4674 !+
4195096e 4675 '''.format(comment=COMMENT_RE, skip=SKIP_RE), fix_kv, code)
e05f6939
PH
4676
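# A small sketch of what js_to_json normalizes (hypothetical helper, not part of
# the original module): bare keys are quoted, single quotes become double quotes,
# hex literals are converted and trailing commas are dropped:
def _example_js_to_json():
    converted = js_to_json("{abc: 'def', num: 0x1a, flag: true,}")
    assert json.loads(converted) == {'abc': 'def', 'num': 26, 'flag': True}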
4677
478c2c61
PH
4678def qualities(quality_ids):
4679 """ Get a numeric quality value out of a list of possible values """
4680 def q(qid):
4681 try:
4682 return quality_ids.index(qid)
4683 except ValueError:
4684 return -1
4685 return q
4686
acd69589 4687
ebed8b37 4688POSTPROCESS_WHEN = {'pre_process', 'before_dl', 'after_move', 'post_process', 'after_video', 'playlist'}
1e43a6f7 4689
4690
de6000d9 4691DEFAULT_OUTTMPL = {
4692 'default': '%(title)s [%(id)s].%(ext)s',
72755351 4693 'chapter': '%(title)s - %(section_number)03d %(section_title)s [%(id)s].%(ext)s',
de6000d9 4694}
4695OUTTMPL_TYPES = {
72755351 4696 'chapter': None,
de6000d9 4697 'subtitle': None,
4698 'thumbnail': None,
4699 'description': 'description',
4700 'annotation': 'annotations.xml',
4701 'infojson': 'info.json',
08438d2c 4702 'link': None,
3b603dbd 4703 'pl_video': None,
5112f26a 4704 'pl_thumbnail': None,
de6000d9 4705 'pl_description': 'description',
4706 'pl_infojson': 'info.json',
4707}
0a871f68 4708
143db31d 4709# As of [1], the format syntax is:
4710# %[mapping_key][conversion_flags][minimum_width][.precision][length_modifier]type
4711# 1. https://docs.python.org/2/library/stdtypes.html#string-formatting
901130bb 4712STR_FORMAT_RE_TMPL = r'''(?x)
4713 (?<!%)(?P<prefix>(?:%%)*)
143db31d 4714 %
524e2e4f 4715 (?P<has_key>\((?P<key>{0})\))?
752cda38 4716 (?P<format>
524e2e4f 4717 (?P<conversion>[#0\-+ ]+)?
4718 (?P<min_width>\d+)?
4719 (?P<precision>\.\d+)?
4720 (?P<len_mod>[hlL])? # unused in python
901130bb 4721 {1} # conversion type
752cda38 4722 )
143db31d 4723'''
4724
7d1eb38a 4725
901130bb 4726STR_FORMAT_TYPES = 'diouxXeEfFgGcrs'
a020a0dc 4727
7d1eb38a 4728
a020a0dc
PH
4729def limit_length(s, length):
4730 """ Add ellipses to overly long strings """
4731 if s is None:
4732 return None
4733 ELLIPSES = '...'
4734 if len(s) > length:
4735 return s[:length - len(ELLIPSES)] + ELLIPSES
4736 return s
48844745
PH
4737
4738
4739def version_tuple(v):
5f9b8394 4740 return tuple(int(e) for e in re.split(r'[-.]', v))
48844745
PH
4741
4742
4743def is_outdated_version(version, limit, assume_new=True):
4744 if not version:
4745 return not assume_new
4746 try:
4747 return version_tuple(version) < version_tuple(limit)
4748 except ValueError:
4749 return not assume_new
732ea2f0
PH
4750
4751
4752def ytdl_is_updateable():
7a5c1cfe 4753 """ Returns whether yt-dlp can be updated with -U """
735d865e 4754
5d535b4a 4755 from .update import is_non_updateable
732ea2f0 4756
5d535b4a 4757 return not is_non_updateable()
7d4111ed
PH
4758
4759
4760def args_to_str(args):
4761 # Get a short string representation for a subprocess command
702ccf2d 4762 return ' '.join(compat_shlex_quote(a) for a in args)
2ccd1b10
PH
4763
4764
9b9c5355 4765def error_to_compat_str(err):
fdae2358
S
4766 err_str = str(err)
4767 # On python 2 error byte string must be decoded with proper
4768 # encoding rather than ascii
4769 if sys.version_info[0] < 3:
4770 err_str = err_str.decode(preferredencoding())
4771 return err_str
4772
4773
c460bdd5 4774def mimetype2ext(mt):
eb9ee194
S
4775 if mt is None:
4776 return None
4777
9359f3d4
F
4778 mt, _, params = mt.partition(';')
4779 mt = mt.strip()
4780
4781 FULL_MAP = {
765ac263 4782 'audio/mp4': 'm4a',
6c33d24b
YCH
4783 # Per RFC 3003, audio/mpeg can be .mp1, .mp2 or .mp3. Here use .mp3 as
4784 # it's the most popular one
4785 'audio/mpeg': 'mp3',
ba39289d 4786 'audio/x-wav': 'wav',
9359f3d4
F
4787 'audio/wav': 'wav',
4788 'audio/wave': 'wav',
4789 }
4790
4791 ext = FULL_MAP.get(mt)
765ac263
JMF
4792 if ext is not None:
4793 return ext
4794
9359f3d4 4795 SUBTYPE_MAP = {
f6861ec9 4796 '3gpp': '3gp',
cafcf657 4797 'smptett+xml': 'tt',
cafcf657 4798 'ttaf+xml': 'dfxp',
a0d8d704 4799 'ttml+xml': 'ttml',
f6861ec9 4800 'x-flv': 'flv',
a0d8d704 4801 'x-mp4-fragmented': 'mp4',
d4f05d47 4802 'x-ms-sami': 'sami',
a0d8d704 4803 'x-ms-wmv': 'wmv',
b4173f15
RA
4804 'mpegurl': 'm3u8',
4805 'x-mpegurl': 'm3u8',
4806 'vnd.apple.mpegurl': 'm3u8',
4807 'dash+xml': 'mpd',
b4173f15 4808 'f4m+xml': 'f4m',
f164b971 4809 'hds+xml': 'f4m',
e910fe2f 4810 'vnd.ms-sstr+xml': 'ism',
c2b2c7e1 4811 'quicktime': 'mov',
98ce1a3f 4812 'mp2t': 'ts',
39e7107d 4813 'x-wav': 'wav',
9359f3d4
F
4814 'filmstrip+json': 'fs',
4815 'svg+xml': 'svg',
4816 }
4817
4818 _, _, subtype = mt.rpartition('/')
4819 ext = SUBTYPE_MAP.get(subtype.lower())
4820 if ext is not None:
4821 return ext
4822
4823 SUFFIX_MAP = {
4824 'json': 'json',
4825 'xml': 'xml',
4826 'zip': 'zip',
4827 'gzip': 'gz',
4828 }
4829
4830 _, _, suffix = subtype.partition('+')
4831 ext = SUFFIX_MAP.get(suffix)
4832 if ext is not None:
4833 return ext
4834
4835 return subtype.replace('+', '.')
c460bdd5
PH
4836
4837
2814f12b
THD
4838def ext2mimetype(ext_or_url):
4839 if not ext_or_url:
4840 return None
4841 if '.' not in ext_or_url:
4842 ext_or_url = f'file.{ext_or_url}'
4843 return mimetypes.guess_type(ext_or_url)[0]
4844
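# A few lookups through the maps above (hypothetical helper, not part of the
# original module); ext2mimetype defers to the system mimetypes database, so its
# exact output can vary between platforms:
def _example_mimetype2ext():
    assert mimetype2ext('audio/mp4') == 'm4a'
    assert mimetype2ext('application/x-mpegURL') == 'm3u8'
    assert mimetype2ext('video/mp4; codecs="avc1.42E01E"') == 'mp4'
    ext2mimetype('mp4')  # commonly 'video/mp4', depending on the local mimetypes registry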
4845
4f3c5e06 4846def parse_codecs(codecs_str):
4847 # http://tools.ietf.org/html/rfc6381
4848 if not codecs_str:
4849 return {}
a0566bbf 4850 split_codecs = list(filter(None, map(
dbf5416a 4851 str.strip, codecs_str.strip().strip(',').split(','))))
4afa3ec4 4852 vcodec, acodec, tcodec, hdr = None, None, None, None
a0566bbf 4853 for full_codec in split_codecs:
9bd979ca 4854 parts = full_codec.split('.')
4855 codec = parts[0].replace('0', '')
4856 if codec in ('avc1', 'avc2', 'avc3', 'avc4', 'vp9', 'vp8', 'hev1', 'hev2',
4857 'h263', 'h264', 'mp4v', 'hvc1', 'av1', 'theora', 'dvh1', 'dvhe'):
4f3c5e06 4858 if not vcodec:
b69fd25c 4859 vcodec = '.'.join(parts[:4]) if codec in ('vp9', 'av1', 'hvc1') else full_codec
176f1866 4860 if codec in ('dvh1', 'dvhe'):
4861 hdr = 'DV'
9bd979ca 4862 elif codec == 'av1' and len(parts) > 3 and parts[3] == '10':
4863 hdr = 'HDR10'
4864 elif full_codec.replace('0', '').startswith('vp9.2'):
176f1866 4865 hdr = 'HDR10'
b69fd25c 4866 elif codec in ('flac', 'mp4a', 'opus', 'vorbis', 'mp3', 'aac', 'ac-3', 'ec-3', 'eac3', 'dtsc', 'dtse', 'dtsh', 'dtsl'):
4f3c5e06 4867 if not acodec:
4868 acodec = full_codec
4afa3ec4
F
4869 elif codec in ('stpp', 'wvtt',):
4870 if not tcodec:
4871 tcodec = full_codec
4f3c5e06 4872 else:
60f5c9fb 4873 write_string('WARNING: Unknown codec %s\n' % full_codec, sys.stderr)
4afa3ec4 4874 if vcodec or acodec or tcodec:
4f3c5e06 4875 return {
4876 'vcodec': vcodec or 'none',
4877 'acodec': acodec or 'none',
176f1866 4878 'dynamic_range': hdr,
4afa3ec4 4879 **({'tcodec': tcodec} if tcodec is not None else {}),
4f3c5e06 4880 }
b69fd25c 4881 elif len(split_codecs) == 2:
4882 return {
4883 'vcodec': split_codecs[0],
4884 'acodec': split_codecs[1],
4885 }
4f3c5e06 4886 return {}
4887
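# A sketch of parse_codecs on an RFC 6381 codecs string (hypothetical helper, not
# part of the original module); the first video and audio entries win:
def _example_parse_codecs():
    assert parse_codecs('avc1.42E01E, mp4a.40.2') == {
        'vcodec': 'avc1.42E01E',
        'acodec': 'mp4a.40.2',
        'dynamic_range': None,
    }
    assert parse_codecs('') == {}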
4888
2ccd1b10 4889def urlhandle_detect_ext(url_handle):
79298173 4890 getheader = url_handle.headers.get
2ccd1b10 4891
b55ee18f
PH
4892 cd = getheader('Content-Disposition')
4893 if cd:
4894 m = re.match(r'attachment;\s*filename="(?P<filename>[^"]+)"', cd)
4895 if m:
4896 e = determine_ext(m.group('filename'), default_ext=None)
4897 if e:
4898 return e
4899
c460bdd5 4900 return mimetype2ext(getheader('Content-Type'))
05900629
PH
4901
4902
1e399778
YCH
4903def encode_data_uri(data, mime_type):
4904 return 'data:%s;base64,%s' % (mime_type, base64.b64encode(data).decode('ascii'))
4905
4906
05900629 4907def age_restricted(content_limit, age_limit):
6ec6cb4e 4908 """ Returns True iff the content should be blocked """
05900629
PH
4909
4910 if age_limit is None: # No limit set
4911 return False
4912 if content_limit is None:
4913 return False # Content available for everyone
4914 return age_limit < content_limit
61ca9a80
PH
4915
4916
4917def is_html(first_bytes):
4918 """ Detect whether a file contains HTML by examining its first bytes. """
4919
4920 BOMS = [
4921 (b'\xef\xbb\xbf', 'utf-8'),
4922 (b'\x00\x00\xfe\xff', 'utf-32-be'),
4923 (b'\xff\xfe\x00\x00', 'utf-32-le'),
4924 (b'\xff\xfe', 'utf-16-le'),
4925 (b'\xfe\xff', 'utf-16-be'),
4926 ]
4927 for bom, enc in BOMS:
4928 if first_bytes.startswith(bom):
4929 s = first_bytes[len(bom):].decode(enc, 'replace')
4930 break
4931 else:
4932 s = first_bytes.decode('utf-8', 'replace')
4933
4934 return re.match(r'^\s*<', s)
a055469f
PH
4935
4936
4937def determine_protocol(info_dict):
4938 protocol = info_dict.get('protocol')
4939 if protocol is not None:
4940 return protocol
4941
7de837a5 4942 url = sanitize_url(info_dict['url'])
a055469f
PH
4943 if url.startswith('rtmp'):
4944 return 'rtmp'
4945 elif url.startswith('mms'):
4946 return 'mms'
4947 elif url.startswith('rtsp'):
4948 return 'rtsp'
4949
4950 ext = determine_ext(url)
4951 if ext == 'm3u8':
4952 return 'm3u8'
4953 elif ext == 'f4m':
4954 return 'f4m'
4955
4956 return compat_urllib_parse_urlparse(url).scheme
cfb56d1a
PH
4957
4958
c5e3f849 4959def render_table(header_row, data, delim=False, extra_gap=0, hide_empty=False):
4960 """ Render a list of rows, each as a list of values.
4961 Text after a \t will be right aligned """
ec11a9f4 4962 def width(string):
c5e3f849 4963 return len(remove_terminal_sequences(string).replace('\t', ''))
76d321f6 4964
4965 def get_max_lens(table):
ec11a9f4 4966 return [max(width(str(v)) for v in col) for col in zip(*table)]
76d321f6 4967
4968 def filter_using_list(row, filterArray):
4969 return [col for (take, col) in zip(filterArray, row) if take]
4970
c5e3f849 4971 if hide_empty:
76d321f6 4972 max_lens = get_max_lens(data)
4973 header_row = filter_using_list(header_row, max_lens)
4974 data = [filter_using_list(row, max_lens) for row in data]
4975
cfb56d1a 4976 table = [header_row] + data
76d321f6 4977 max_lens = get_max_lens(table)
c5e3f849 4978 extra_gap += 1
76d321f6 4979 if delim:
c5e3f849 4980 table = [header_row, [delim * (ml + extra_gap) for ml in max_lens]] + data
4981 table[1][-1] = table[1][-1][:-extra_gap] # Remove extra_gap from end of delimiter
ec11a9f4 4982 for row in table:
4983 for pos, text in enumerate(map(str, row)):
c5e3f849 4984 if '\t' in text:
4985 row[pos] = text.replace('\t', ' ' * (max_lens[pos] - width(text))) + ' ' * extra_gap
4986 else:
4987 row[pos] = text + ' ' * (max_lens[pos] - width(text) + extra_gap)
4988 ret = '\n'.join(''.join(row).rstrip() for row in table)
ec11a9f4 4989 return ret
347de493
PH
4990
4991
8f18aca8 4992def _match_one(filter_part, dct, incomplete):
77b87f05 4993 # TODO: Generalize code with YoutubeDL._build_format_filter
a047eeb6 4994 STRING_OPERATORS = {
4995 '*=': operator.contains,
4996 '^=': lambda attr, value: attr.startswith(value),
4997 '$=': lambda attr, value: attr.endswith(value),
4998 '~=': lambda attr, value: re.search(value, attr),
4999 }
347de493 5000 COMPARISON_OPERATORS = {
a047eeb6 5001 **STRING_OPERATORS,
5002 '<=': operator.le, # "<=" must be defined above "<"
347de493 5003 '<': operator.lt,
347de493 5004 '>=': operator.ge,
a047eeb6 5005 '>': operator.gt,
347de493 5006 '=': operator.eq,
347de493 5007 }
a047eeb6 5008
347de493
PH
5009 operator_rex = re.compile(r'''(?x)\s*
5010 (?P<key>[a-z_]+)
77b87f05 5011 \s*(?P<negation>!\s*)?(?P<op>%s)(?P<none_inclusive>\s*\?)?\s*
347de493 5012 (?:
a047eeb6 5013 (?P<quote>["\'])(?P<quotedstrval>.+?)(?P=quote)|
5014 (?P<strval>.+?)
347de493
PH
5015 )
5016 \s*$
5017 ''' % '|'.join(map(re.escape, COMPARISON_OPERATORS.keys())))
5018 m = operator_rex.search(filter_part)
5019 if m:
18f96d12 5020 m = m.groupdict()
5021 unnegated_op = COMPARISON_OPERATORS[m['op']]
5022 if m['negation']:
77b87f05
MT
5023 op = lambda attr, value: not unnegated_op(attr, value)
5024 else:
5025 op = unnegated_op
18f96d12 5026 comparison_value = m['quotedstrval'] or m['strval'] or m['intval']
5027 if m['quote']:
5028 comparison_value = comparison_value.replace(r'\%s' % m['quote'], m['quote'])
5029 actual_value = dct.get(m['key'])
5030 numeric_comparison = None
5031 if isinstance(actual_value, compat_numeric_types):
e5a088dc
S
 5032 # If the original field is a string and the matching comparison value is
 5033 # a number, we should respect the origin of the original field
5034 # and process comparison value as a string (see
18f96d12 5035 # https://github.com/ytdl-org/youtube-dl/issues/11082)
347de493 5036 try:
18f96d12 5037 numeric_comparison = int(comparison_value)
347de493 5038 except ValueError:
18f96d12 5039 numeric_comparison = parse_filesize(comparison_value)
5040 if numeric_comparison is None:
5041 numeric_comparison = parse_filesize(f'{comparison_value}B')
5042 if numeric_comparison is None:
5043 numeric_comparison = parse_duration(comparison_value)
5044 if numeric_comparison is not None and m['op'] in STRING_OPERATORS:
5045 raise ValueError('Operator %s only supports string values!' % m['op'])
347de493 5046 if actual_value is None:
18f96d12 5047 return incomplete or m['none_inclusive']
5048 return op(actual_value, comparison_value if numeric_comparison is None else numeric_comparison)
347de493
PH
5049
5050 UNARY_OPERATORS = {
1cc47c66
S
5051 '': lambda v: (v is True) if isinstance(v, bool) else (v is not None),
5052 '!': lambda v: (v is False) if isinstance(v, bool) else (v is None),
347de493
PH
5053 }
5054 operator_rex = re.compile(r'''(?x)\s*
5055 (?P<op>%s)\s*(?P<key>[a-z_]+)
5056 \s*$
5057 ''' % '|'.join(map(re.escape, UNARY_OPERATORS.keys())))
5058 m = operator_rex.search(filter_part)
5059 if m:
5060 op = UNARY_OPERATORS[m.group('op')]
5061 actual_value = dct.get(m.group('key'))
8f18aca8 5062 if incomplete and actual_value is None:
5063 return True
347de493
PH
5064 return op(actual_value)
5065
5066 raise ValueError('Invalid filter part %r' % filter_part)
5067
5068
8f18aca8 5069def match_str(filter_str, dct, incomplete=False):
5070 """ Filter a dictionary with a simple string syntax. Returns True (=passes filter) or false
5071 When incomplete, all conditions passes on missing fields
5072 """
347de493 5073 return all(
8f18aca8 5074 _match_one(filter_part.replace(r'\&', '&'), dct, incomplete)
a047eeb6 5075 for filter_part in re.split(r'(?<!\\)&', filter_str))
347de493
PH
5076
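# Illustrative filters for match_str (hypothetical helper, not part of the
# original module): '&' joins conditions, comparisons coerce to numbers where
# possible, and a bare (or '!'-prefixed) key tests for presence/absence:
def _example_match_str():
    assert match_str('like_count > 100 & description', {'like_count': 190, 'description': 'x'}) is True
    assert match_str('like_count > 100', {'like_count': 50}) is False
    assert match_str('!is_live', {'is_live': False}) is True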
5077
5078def match_filter_func(filter_str):
8f18aca8 5079 def _match_func(info_dict, *args, **kwargs):
5080 if match_str(filter_str, info_dict, *args, **kwargs):
347de493
PH
5081 return None
5082 else:
5083 video_title = info_dict.get('title', info_dict.get('id', 'video'))
5084 return '%s does not pass filter %s, skipping ..' % (video_title, filter_str)
5085 return _match_func
91410c9b
PH
5086
5087
bf6427d2
YCH
5088def parse_dfxp_time_expr(time_expr):
5089 if not time_expr:
d631d5f9 5090 return
bf6427d2
YCH
5091
5092 mobj = re.match(r'^(?P<time_offset>\d+(?:\.\d+)?)s?$', time_expr)
5093 if mobj:
5094 return float(mobj.group('time_offset'))
5095
db2fe38b 5096 mobj = re.match(r'^(\d+):(\d\d):(\d\d(?:(?:\.|:)\d+)?)$', time_expr)
bf6427d2 5097 if mobj:
db2fe38b 5098 return 3600 * int(mobj.group(1)) + 60 * int(mobj.group(2)) + float(mobj.group(3).replace(':', '.'))
bf6427d2
YCH
5099
5100
c1c924ab 5101def srt_subtitles_timecode(seconds):
aa7785f8 5102 return '%02d:%02d:%02d,%03d' % timetuple_from_msec(seconds * 1000)
5103
5104
5105def ass_subtitles_timecode(seconds):
5106 time = timetuple_from_msec(seconds * 1000)
5107 return '%01d:%02d:%02d.%02d' % (*time[:-1], time.milliseconds / 10)
bf6427d2
YCH
5108
5109
5110def dfxp2srt(dfxp_data):
3869028f
YCH
5111 '''
5112 @param dfxp_data A bytes-like object containing DFXP data
5113 @returns A unicode object containing converted SRT data
5114 '''
5b995f71 5115 LEGACY_NAMESPACES = (
3869028f
YCH
5116 (b'http://www.w3.org/ns/ttml', [
5117 b'http://www.w3.org/2004/11/ttaf1',
5118 b'http://www.w3.org/2006/04/ttaf1',
5119 b'http://www.w3.org/2006/10/ttaf1',
5b995f71 5120 ]),
3869028f
YCH
5121 (b'http://www.w3.org/ns/ttml#styling', [
5122 b'http://www.w3.org/ns/ttml#style',
5b995f71
RA
5123 ]),
5124 )
5125
5126 SUPPORTED_STYLING = [
5127 'color',
5128 'fontFamily',
5129 'fontSize',
5130 'fontStyle',
5131 'fontWeight',
5132 'textDecoration'
5133 ]
5134
4e335771 5135 _x = functools.partial(xpath_with_ns, ns_map={
261f4730 5136 'xml': 'http://www.w3.org/XML/1998/namespace',
4e335771 5137 'ttml': 'http://www.w3.org/ns/ttml',
5b995f71 5138 'tts': 'http://www.w3.org/ns/ttml#styling',
4e335771 5139 })
bf6427d2 5140
5b995f71
RA
5141 styles = {}
5142 default_style = {}
5143
87de7069 5144 class TTMLPElementParser(object):
5b995f71
RA
5145 _out = ''
5146 _unclosed_elements = []
5147 _applied_styles = []
bf6427d2 5148
2b14cb56 5149 def start(self, tag, attrib):
5b995f71
RA
5150 if tag in (_x('ttml:br'), 'br'):
5151 self._out += '\n'
5152 else:
5153 unclosed_elements = []
5154 style = {}
5155 element_style_id = attrib.get('style')
5156 if default_style:
5157 style.update(default_style)
5158 if element_style_id:
5159 style.update(styles.get(element_style_id, {}))
5160 for prop in SUPPORTED_STYLING:
5161 prop_val = attrib.get(_x('tts:' + prop))
5162 if prop_val:
5163 style[prop] = prop_val
5164 if style:
5165 font = ''
5166 for k, v in sorted(style.items()):
5167 if self._applied_styles and self._applied_styles[-1].get(k) == v:
5168 continue
5169 if k == 'color':
5170 font += ' color="%s"' % v
5171 elif k == 'fontSize':
5172 font += ' size="%s"' % v
5173 elif k == 'fontFamily':
5174 font += ' face="%s"' % v
5175 elif k == 'fontWeight' and v == 'bold':
5176 self._out += '<b>'
5177 unclosed_elements.append('b')
5178 elif k == 'fontStyle' and v == 'italic':
5179 self._out += '<i>'
5180 unclosed_elements.append('i')
5181 elif k == 'textDecoration' and v == 'underline':
5182 self._out += '<u>'
5183 unclosed_elements.append('u')
5184 if font:
5185 self._out += '<font' + font + '>'
5186 unclosed_elements.append('font')
5187 applied_style = {}
5188 if self._applied_styles:
5189 applied_style.update(self._applied_styles[-1])
5190 applied_style.update(style)
5191 self._applied_styles.append(applied_style)
5192 self._unclosed_elements.append(unclosed_elements)
bf6427d2 5193
2b14cb56 5194 def end(self, tag):
5b995f71
RA
5195 if tag not in (_x('ttml:br'), 'br'):
5196 unclosed_elements = self._unclosed_elements.pop()
5197 for element in reversed(unclosed_elements):
5198 self._out += '</%s>' % element
5199 if unclosed_elements and self._applied_styles:
5200 self._applied_styles.pop()
bf6427d2 5201
2b14cb56 5202 def data(self, data):
5b995f71 5203 self._out += data
2b14cb56 5204
5205 def close(self):
5b995f71 5206 return self._out.strip()
2b14cb56 5207
5208 def parse_node(node):
5209 target = TTMLPElementParser()
5210 parser = xml.etree.ElementTree.XMLParser(target=target)
5211 parser.feed(xml.etree.ElementTree.tostring(node))
5212 return parser.close()
bf6427d2 5213
5b995f71
RA
5214 for k, v in LEGACY_NAMESPACES:
5215 for ns in v:
5216 dfxp_data = dfxp_data.replace(ns, k)
5217
3869028f 5218 dfxp = compat_etree_fromstring(dfxp_data)
bf6427d2 5219 out = []
5b995f71 5220 paras = dfxp.findall(_x('.//ttml:p')) or dfxp.findall('.//p')
1b0427e6
YCH
5221
5222 if not paras:
5223 raise ValueError('Invalid dfxp/TTML subtitle')
bf6427d2 5224
5b995f71
RA
5225 repeat = False
5226 while True:
5227 for style in dfxp.findall(_x('.//ttml:style')):
261f4730
RA
5228 style_id = style.get('id') or style.get(_x('xml:id'))
5229 if not style_id:
5230 continue
5b995f71
RA
5231 parent_style_id = style.get('style')
5232 if parent_style_id:
5233 if parent_style_id not in styles:
5234 repeat = True
5235 continue
5236 styles[style_id] = styles[parent_style_id].copy()
5237 for prop in SUPPORTED_STYLING:
5238 prop_val = style.get(_x('tts:' + prop))
5239 if prop_val:
5240 styles.setdefault(style_id, {})[prop] = prop_val
5241 if repeat:
5242 repeat = False
5243 else:
5244 break
5245
5246 for p in ('body', 'div'):
5247 ele = xpath_element(dfxp, [_x('.//ttml:' + p), './/' + p])
5248 if ele is None:
5249 continue
5250 style = styles.get(ele.get('style'))
5251 if not style:
5252 continue
5253 default_style.update(style)
5254
bf6427d2 5255 for para, index in zip(paras, itertools.count(1)):
d631d5f9 5256 begin_time = parse_dfxp_time_expr(para.attrib.get('begin'))
7dff0363 5257 end_time = parse_dfxp_time_expr(para.attrib.get('end'))
d631d5f9
YCH
5258 dur = parse_dfxp_time_expr(para.attrib.get('dur'))
5259 if begin_time is None:
5260 continue
7dff0363 5261 if not end_time:
d631d5f9
YCH
5262 if not dur:
5263 continue
5264 end_time = begin_time + dur
bf6427d2
YCH
5265 out.append('%d\n%s --> %s\n%s\n\n' % (
5266 index,
c1c924ab
YCH
5267 srt_subtitles_timecode(begin_time),
5268 srt_subtitles_timecode(end_time),
bf6427d2
YCH
5269 parse_node(para)))
5270
5271 return ''.join(out)
5272
5273
66e289ba
S
5274def cli_option(params, command_option, param):
5275 param = params.get(param)
98e698f1
RA
5276 if param:
5277 param = compat_str(param)
66e289ba
S
5278 return [command_option, param] if param is not None else []
5279
5280
5281def cli_bool_option(params, command_option, param, true_value='true', false_value='false', separator=None):
5282 param = params.get(param)
5b232f46
S
5283 if param is None:
5284 return []
66e289ba
S
5285 assert isinstance(param, bool)
5286 if separator:
5287 return [command_option + separator + (true_value if param else false_value)]
5288 return [command_option, true_value if param else false_value]
5289
5290
5291def cli_valueless_option(params, command_option, param, expected_value=True):
5292 param = params.get(param)
5293 return [command_option] if param == expected_value else []
5294
5295
e92caff5 5296def cli_configuration_args(argdict, keys, default=[], use_compat=True):
eab9b2bc 5297 if isinstance(argdict, (list, tuple)): # for backward compatibility
e92caff5 5298 if use_compat:
5b1ecbb3 5299 return argdict
5300 else:
5301 argdict = None
eab9b2bc 5302 if argdict is None:
5b1ecbb3 5303 return default
eab9b2bc 5304 assert isinstance(argdict, dict)
5305
e92caff5 5306 assert isinstance(keys, (list, tuple))
5307 for key_list in keys:
e92caff5 5308 arg_list = list(filter(
5309 lambda x: x is not None,
6606817a 5310 [argdict.get(key.lower()) for key in variadic(key_list)]))
e92caff5 5311 if arg_list:
5312 return [arg for args in arg_list for arg in args]
5313 return default
66e289ba 5314
6251555f 5315
330690a2 5316def _configuration_args(main_key, argdict, exe, keys=None, default=[], use_compat=True):
5317 main_key, exe = main_key.lower(), exe.lower()
5318 root_key = exe if main_key == exe else f'{main_key}+{exe}'
5319 keys = [f'{root_key}{k}' for k in (keys or [''])]
5320 if root_key in keys:
5321 if main_key != exe:
5322 keys.append((main_key, exe))
5323 keys.append('default')
5324 else:
5325 use_compat = False
5326 return cli_configuration_args(argdict, keys, default, use_compat)
5327
66e289ba 5328
39672624
YCH
5329class ISO639Utils(object):
5330 # See http://www.loc.gov/standards/iso639-2/ISO-639-2_utf-8.txt
5331 _lang_map = {
5332 'aa': 'aar',
5333 'ab': 'abk',
5334 'ae': 'ave',
5335 'af': 'afr',
5336 'ak': 'aka',
5337 'am': 'amh',
5338 'an': 'arg',
5339 'ar': 'ara',
5340 'as': 'asm',
5341 'av': 'ava',
5342 'ay': 'aym',
5343 'az': 'aze',
5344 'ba': 'bak',
5345 'be': 'bel',
5346 'bg': 'bul',
5347 'bh': 'bih',
5348 'bi': 'bis',
5349 'bm': 'bam',
5350 'bn': 'ben',
5351 'bo': 'bod',
5352 'br': 'bre',
5353 'bs': 'bos',
5354 'ca': 'cat',
5355 'ce': 'che',
5356 'ch': 'cha',
5357 'co': 'cos',
5358 'cr': 'cre',
5359 'cs': 'ces',
5360 'cu': 'chu',
5361 'cv': 'chv',
5362 'cy': 'cym',
5363 'da': 'dan',
5364 'de': 'deu',
5365 'dv': 'div',
5366 'dz': 'dzo',
5367 'ee': 'ewe',
5368 'el': 'ell',
5369 'en': 'eng',
5370 'eo': 'epo',
5371 'es': 'spa',
5372 'et': 'est',
5373 'eu': 'eus',
5374 'fa': 'fas',
5375 'ff': 'ful',
5376 'fi': 'fin',
5377 'fj': 'fij',
5378 'fo': 'fao',
5379 'fr': 'fra',
5380 'fy': 'fry',
5381 'ga': 'gle',
5382 'gd': 'gla',
5383 'gl': 'glg',
5384 'gn': 'grn',
5385 'gu': 'guj',
5386 'gv': 'glv',
5387 'ha': 'hau',
5388 'he': 'heb',
b7acc835 5389 'iw': 'heb', # Replaced by he in 1989 revision
39672624
YCH
5390 'hi': 'hin',
5391 'ho': 'hmo',
5392 'hr': 'hrv',
5393 'ht': 'hat',
5394 'hu': 'hun',
5395 'hy': 'hye',
5396 'hz': 'her',
5397 'ia': 'ina',
5398 'id': 'ind',
b7acc835 5399 'in': 'ind', # Replaced by id in 1989 revision
39672624
YCH
5400 'ie': 'ile',
5401 'ig': 'ibo',
5402 'ii': 'iii',
5403 'ik': 'ipk',
5404 'io': 'ido',
5405 'is': 'isl',
5406 'it': 'ita',
5407 'iu': 'iku',
5408 'ja': 'jpn',
5409 'jv': 'jav',
5410 'ka': 'kat',
5411 'kg': 'kon',
5412 'ki': 'kik',
5413 'kj': 'kua',
5414 'kk': 'kaz',
5415 'kl': 'kal',
5416 'km': 'khm',
5417 'kn': 'kan',
5418 'ko': 'kor',
5419 'kr': 'kau',
5420 'ks': 'kas',
5421 'ku': 'kur',
5422 'kv': 'kom',
5423 'kw': 'cor',
5424 'ky': 'kir',
5425 'la': 'lat',
5426 'lb': 'ltz',
5427 'lg': 'lug',
5428 'li': 'lim',
5429 'ln': 'lin',
5430 'lo': 'lao',
5431 'lt': 'lit',
5432 'lu': 'lub',
5433 'lv': 'lav',
5434 'mg': 'mlg',
5435 'mh': 'mah',
5436 'mi': 'mri',
5437 'mk': 'mkd',
5438 'ml': 'mal',
5439 'mn': 'mon',
5440 'mr': 'mar',
5441 'ms': 'msa',
5442 'mt': 'mlt',
5443 'my': 'mya',
5444 'na': 'nau',
5445 'nb': 'nob',
5446 'nd': 'nde',
5447 'ne': 'nep',
5448 'ng': 'ndo',
5449 'nl': 'nld',
5450 'nn': 'nno',
5451 'no': 'nor',
5452 'nr': 'nbl',
5453 'nv': 'nav',
5454 'ny': 'nya',
5455 'oc': 'oci',
5456 'oj': 'oji',
5457 'om': 'orm',
5458 'or': 'ori',
5459 'os': 'oss',
5460 'pa': 'pan',
5461 'pi': 'pli',
5462 'pl': 'pol',
5463 'ps': 'pus',
5464 'pt': 'por',
5465 'qu': 'que',
5466 'rm': 'roh',
5467 'rn': 'run',
5468 'ro': 'ron',
5469 'ru': 'rus',
5470 'rw': 'kin',
5471 'sa': 'san',
5472 'sc': 'srd',
5473 'sd': 'snd',
5474 'se': 'sme',
5475 'sg': 'sag',
5476 'si': 'sin',
5477 'sk': 'slk',
5478 'sl': 'slv',
5479 'sm': 'smo',
5480 'sn': 'sna',
5481 'so': 'som',
5482 'sq': 'sqi',
5483 'sr': 'srp',
5484 'ss': 'ssw',
5485 'st': 'sot',
5486 'su': 'sun',
5487 'sv': 'swe',
5488 'sw': 'swa',
5489 'ta': 'tam',
5490 'te': 'tel',
5491 'tg': 'tgk',
5492 'th': 'tha',
5493 'ti': 'tir',
5494 'tk': 'tuk',
5495 'tl': 'tgl',
5496 'tn': 'tsn',
5497 'to': 'ton',
5498 'tr': 'tur',
5499 'ts': 'tso',
5500 'tt': 'tat',
5501 'tw': 'twi',
5502 'ty': 'tah',
5503 'ug': 'uig',
5504 'uk': 'ukr',
5505 'ur': 'urd',
5506 'uz': 'uzb',
5507 've': 'ven',
5508 'vi': 'vie',
5509 'vo': 'vol',
5510 'wa': 'wln',
5511 'wo': 'wol',
5512 'xh': 'xho',
5513 'yi': 'yid',
e9a50fba 5514 'ji': 'yid', # Replaced by yi in 1989 revision
39672624
YCH
5515 'yo': 'yor',
5516 'za': 'zha',
5517 'zh': 'zho',
5518 'zu': 'zul',
5519 }
5520
5521 @classmethod
5522 def short2long(cls, code):
5523 """Convert language code from ISO 639-1 to ISO 639-2/T"""
5524 return cls._lang_map.get(code[:2])
5525
5526 @classmethod
5527 def long2short(cls, code):
5528 """Convert language code from ISO 639-2/T to ISO 639-1"""
5529 for short_name, long_name in cls._lang_map.items():
5530 if long_name == code:
5531 return short_name
5532
5533
4eb10f66
YCH
5534class ISO3166Utils(object):
5535 # From http://data.okfn.org/data/core/country-list
5536 _country_map = {
5537 'AF': 'Afghanistan',
5538 'AX': 'Åland Islands',
5539 'AL': 'Albania',
5540 'DZ': 'Algeria',
5541 'AS': 'American Samoa',
5542 'AD': 'Andorra',
5543 'AO': 'Angola',
5544 'AI': 'Anguilla',
5545 'AQ': 'Antarctica',
5546 'AG': 'Antigua and Barbuda',
5547 'AR': 'Argentina',
5548 'AM': 'Armenia',
5549 'AW': 'Aruba',
5550 'AU': 'Australia',
5551 'AT': 'Austria',
5552 'AZ': 'Azerbaijan',
5553 'BS': 'Bahamas',
5554 'BH': 'Bahrain',
5555 'BD': 'Bangladesh',
5556 'BB': 'Barbados',
5557 'BY': 'Belarus',
5558 'BE': 'Belgium',
5559 'BZ': 'Belize',
5560 'BJ': 'Benin',
5561 'BM': 'Bermuda',
5562 'BT': 'Bhutan',
5563 'BO': 'Bolivia, Plurinational State of',
5564 'BQ': 'Bonaire, Sint Eustatius and Saba',
5565 'BA': 'Bosnia and Herzegovina',
5566 'BW': 'Botswana',
5567 'BV': 'Bouvet Island',
5568 'BR': 'Brazil',
5569 'IO': 'British Indian Ocean Territory',
5570 'BN': 'Brunei Darussalam',
5571 'BG': 'Bulgaria',
5572 'BF': 'Burkina Faso',
5573 'BI': 'Burundi',
5574 'KH': 'Cambodia',
5575 'CM': 'Cameroon',
5576 'CA': 'Canada',
5577 'CV': 'Cape Verde',
5578 'KY': 'Cayman Islands',
5579 'CF': 'Central African Republic',
5580 'TD': 'Chad',
5581 'CL': 'Chile',
5582 'CN': 'China',
5583 'CX': 'Christmas Island',
5584 'CC': 'Cocos (Keeling) Islands',
5585 'CO': 'Colombia',
5586 'KM': 'Comoros',
5587 'CG': 'Congo',
5588 'CD': 'Congo, the Democratic Republic of the',
5589 'CK': 'Cook Islands',
5590 'CR': 'Costa Rica',
5591 'CI': 'Côte d\'Ivoire',
5592 'HR': 'Croatia',
5593 'CU': 'Cuba',
5594 'CW': 'Curaçao',
5595 'CY': 'Cyprus',
5596 'CZ': 'Czech Republic',
5597 'DK': 'Denmark',
5598 'DJ': 'Djibouti',
5599 'DM': 'Dominica',
5600 'DO': 'Dominican Republic',
5601 'EC': 'Ecuador',
5602 'EG': 'Egypt',
5603 'SV': 'El Salvador',
5604 'GQ': 'Equatorial Guinea',
5605 'ER': 'Eritrea',
5606 'EE': 'Estonia',
5607 'ET': 'Ethiopia',
5608 'FK': 'Falkland Islands (Malvinas)',
5609 'FO': 'Faroe Islands',
5610 'FJ': 'Fiji',
5611 'FI': 'Finland',
5612 'FR': 'France',
5613 'GF': 'French Guiana',
5614 'PF': 'French Polynesia',
5615 'TF': 'French Southern Territories',
5616 'GA': 'Gabon',
5617 'GM': 'Gambia',
5618 'GE': 'Georgia',
5619 'DE': 'Germany',
5620 'GH': 'Ghana',
5621 'GI': 'Gibraltar',
5622 'GR': 'Greece',
5623 'GL': 'Greenland',
5624 'GD': 'Grenada',
5625 'GP': 'Guadeloupe',
5626 'GU': 'Guam',
5627 'GT': 'Guatemala',
5628 'GG': 'Guernsey',
5629 'GN': 'Guinea',
5630 'GW': 'Guinea-Bissau',
5631 'GY': 'Guyana',
5632 'HT': 'Haiti',
5633 'HM': 'Heard Island and McDonald Islands',
5634 'VA': 'Holy See (Vatican City State)',
5635 'HN': 'Honduras',
5636 'HK': 'Hong Kong',
5637 'HU': 'Hungary',
5638 'IS': 'Iceland',
5639 'IN': 'India',
5640 'ID': 'Indonesia',
5641 'IR': 'Iran, Islamic Republic of',
5642 'IQ': 'Iraq',
5643 'IE': 'Ireland',
5644 'IM': 'Isle of Man',
5645 'IL': 'Israel',
5646 'IT': 'Italy',
5647 'JM': 'Jamaica',
5648 'JP': 'Japan',
5649 'JE': 'Jersey',
5650 'JO': 'Jordan',
5651 'KZ': 'Kazakhstan',
5652 'KE': 'Kenya',
5653 'KI': 'Kiribati',
5654 'KP': 'Korea, Democratic People\'s Republic of',
5655 'KR': 'Korea, Republic of',
5656 'KW': 'Kuwait',
5657 'KG': 'Kyrgyzstan',
5658 'LA': 'Lao People\'s Democratic Republic',
5659 'LV': 'Latvia',
5660 'LB': 'Lebanon',
5661 'LS': 'Lesotho',
5662 'LR': 'Liberia',
5663 'LY': 'Libya',
5664 'LI': 'Liechtenstein',
5665 'LT': 'Lithuania',
5666 'LU': 'Luxembourg',
5667 'MO': 'Macao',
5668 'MK': 'Macedonia, the Former Yugoslav Republic of',
5669 'MG': 'Madagascar',
5670 'MW': 'Malawi',
5671 'MY': 'Malaysia',
5672 'MV': 'Maldives',
5673 'ML': 'Mali',
5674 'MT': 'Malta',
5675 'MH': 'Marshall Islands',
5676 'MQ': 'Martinique',
5677 'MR': 'Mauritania',
5678 'MU': 'Mauritius',
5679 'YT': 'Mayotte',
5680 'MX': 'Mexico',
5681 'FM': 'Micronesia, Federated States of',
5682 'MD': 'Moldova, Republic of',
5683 'MC': 'Monaco',
5684 'MN': 'Mongolia',
5685 'ME': 'Montenegro',
5686 'MS': 'Montserrat',
5687 'MA': 'Morocco',
5688 'MZ': 'Mozambique',
5689 'MM': 'Myanmar',
5690 'NA': 'Namibia',
5691 'NR': 'Nauru',
5692 'NP': 'Nepal',
5693 'NL': 'Netherlands',
5694 'NC': 'New Caledonia',
5695 'NZ': 'New Zealand',
5696 'NI': 'Nicaragua',
5697 'NE': 'Niger',
5698 'NG': 'Nigeria',
5699 'NU': 'Niue',
5700 'NF': 'Norfolk Island',
5701 'MP': 'Northern Mariana Islands',
5702 'NO': 'Norway',
5703 'OM': 'Oman',
5704 'PK': 'Pakistan',
5705 'PW': 'Palau',
5706 'PS': 'Palestine, State of',
5707 'PA': 'Panama',
5708 'PG': 'Papua New Guinea',
5709 'PY': 'Paraguay',
5710 'PE': 'Peru',
5711 'PH': 'Philippines',
5712 'PN': 'Pitcairn',
5713 'PL': 'Poland',
5714 'PT': 'Portugal',
5715 'PR': 'Puerto Rico',
5716 'QA': 'Qatar',
5717 'RE': 'Réunion',
5718 'RO': 'Romania',
5719 'RU': 'Russian Federation',
5720 'RW': 'Rwanda',
5721 'BL': 'Saint Barthélemy',
5722 'SH': 'Saint Helena, Ascension and Tristan da Cunha',
5723 'KN': 'Saint Kitts and Nevis',
5724 'LC': 'Saint Lucia',
5725 'MF': 'Saint Martin (French part)',
5726 'PM': 'Saint Pierre and Miquelon',
5727 'VC': 'Saint Vincent and the Grenadines',
5728 'WS': 'Samoa',
5729 'SM': 'San Marino',
5730 'ST': 'Sao Tome and Principe',
5731 'SA': 'Saudi Arabia',
5732 'SN': 'Senegal',
5733 'RS': 'Serbia',
5734 'SC': 'Seychelles',
5735 'SL': 'Sierra Leone',
5736 'SG': 'Singapore',
5737 'SX': 'Sint Maarten (Dutch part)',
5738 'SK': 'Slovakia',
5739 'SI': 'Slovenia',
5740 'SB': 'Solomon Islands',
5741 'SO': 'Somalia',
5742 'ZA': 'South Africa',
5743 'GS': 'South Georgia and the South Sandwich Islands',
5744 'SS': 'South Sudan',
5745 'ES': 'Spain',
5746 'LK': 'Sri Lanka',
5747 'SD': 'Sudan',
5748 'SR': 'Suriname',
5749 'SJ': 'Svalbard and Jan Mayen',
5750 'SZ': 'Swaziland',
5751 'SE': 'Sweden',
5752 'CH': 'Switzerland',
5753 'SY': 'Syrian Arab Republic',
5754 'TW': 'Taiwan, Province of China',
5755 'TJ': 'Tajikistan',
5756 'TZ': 'Tanzania, United Republic of',
5757 'TH': 'Thailand',
5758 'TL': 'Timor-Leste',
5759 'TG': 'Togo',
5760 'TK': 'Tokelau',
5761 'TO': 'Tonga',
5762 'TT': 'Trinidad and Tobago',
5763 'TN': 'Tunisia',
5764 'TR': 'Turkey',
5765 'TM': 'Turkmenistan',
5766 'TC': 'Turks and Caicos Islands',
5767 'TV': 'Tuvalu',
5768 'UG': 'Uganda',
5769 'UA': 'Ukraine',
5770 'AE': 'United Arab Emirates',
5771 'GB': 'United Kingdom',
5772 'US': 'United States',
5773 'UM': 'United States Minor Outlying Islands',
5774 'UY': 'Uruguay',
5775 'UZ': 'Uzbekistan',
5776 'VU': 'Vanuatu',
5777 'VE': 'Venezuela, Bolivarian Republic of',
5778 'VN': 'Viet Nam',
5779 'VG': 'Virgin Islands, British',
5780 'VI': 'Virgin Islands, U.S.',
5781 'WF': 'Wallis and Futuna',
5782 'EH': 'Western Sahara',
5783 'YE': 'Yemen',
5784 'ZM': 'Zambia',
5785 'ZW': 'Zimbabwe',
5786 }
5787
5788 @classmethod
5789 def short2full(cls, code):
5790 """Convert an ISO 3166-2 country code to the corresponding full name"""
5791 return cls._country_map.get(code.upper())
5792
5793
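# Illustrative usage sketch (not part of utils.py; assumes yt-dlp is importable as yt_dlp):
from yt_dlp.utils import ISO3166Utils

assert ISO3166Utils.short2full('de') == 'Germany'   # lookup is case-insensitive
assert ISO3166Utils.short2full('ZZ') is None        # unknown codes return None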
773f291d
S
5794class GeoUtils(object):
5795 # Major IPv4 address blocks per country
5796 _country_ip_map = {
53896ca5 5797 'AD': '46.172.224.0/19',
773f291d
S
5798 'AE': '94.200.0.0/13',
5799 'AF': '149.54.0.0/17',
5800 'AG': '209.59.64.0/18',
5801 'AI': '204.14.248.0/21',
5802 'AL': '46.99.0.0/16',
5803 'AM': '46.70.0.0/15',
5804 'AO': '105.168.0.0/13',
53896ca5
S
5805 'AP': '182.50.184.0/21',
5806 'AQ': '23.154.160.0/24',
773f291d
S
5807 'AR': '181.0.0.0/12',
5808 'AS': '202.70.112.0/20',
53896ca5 5809 'AT': '77.116.0.0/14',
773f291d
S
5810 'AU': '1.128.0.0/11',
5811 'AW': '181.41.0.0/18',
53896ca5
S
5812 'AX': '185.217.4.0/22',
5813 'AZ': '5.197.0.0/16',
773f291d
S
5814 'BA': '31.176.128.0/17',
5815 'BB': '65.48.128.0/17',
5816 'BD': '114.130.0.0/16',
5817 'BE': '57.0.0.0/8',
53896ca5 5818 'BF': '102.178.0.0/15',
773f291d
S
5819 'BG': '95.42.0.0/15',
5820 'BH': '37.131.0.0/17',
5821 'BI': '154.117.192.0/18',
5822 'BJ': '137.255.0.0/16',
53896ca5 5823 'BL': '185.212.72.0/23',
773f291d
S
5824 'BM': '196.12.64.0/18',
5825 'BN': '156.31.0.0/16',
5826 'BO': '161.56.0.0/16',
5827 'BQ': '161.0.80.0/20',
53896ca5 5828 'BR': '191.128.0.0/12',
773f291d
S
5829 'BS': '24.51.64.0/18',
5830 'BT': '119.2.96.0/19',
5831 'BW': '168.167.0.0/16',
5832 'BY': '178.120.0.0/13',
5833 'BZ': '179.42.192.0/18',
5834 'CA': '99.224.0.0/11',
5835 'CD': '41.243.0.0/16',
53896ca5
S
5836 'CF': '197.242.176.0/21',
5837 'CG': '160.113.0.0/16',
773f291d 5838 'CH': '85.0.0.0/13',
53896ca5 5839 'CI': '102.136.0.0/14',
773f291d
S
5840 'CK': '202.65.32.0/19',
5841 'CL': '152.172.0.0/14',
53896ca5 5842 'CM': '102.244.0.0/14',
773f291d
S
5843 'CN': '36.128.0.0/10',
5844 'CO': '181.240.0.0/12',
5845 'CR': '201.192.0.0/12',
5846 'CU': '152.206.0.0/15',
5847 'CV': '165.90.96.0/19',
5848 'CW': '190.88.128.0/17',
53896ca5 5849 'CY': '31.153.0.0/16',
773f291d
S
5850 'CZ': '88.100.0.0/14',
5851 'DE': '53.0.0.0/8',
5852 'DJ': '197.241.0.0/17',
5853 'DK': '87.48.0.0/12',
5854 'DM': '192.243.48.0/20',
5855 'DO': '152.166.0.0/15',
5856 'DZ': '41.96.0.0/12',
5857 'EC': '186.68.0.0/15',
5858 'EE': '90.190.0.0/15',
5859 'EG': '156.160.0.0/11',
5860 'ER': '196.200.96.0/20',
5861 'ES': '88.0.0.0/11',
5862 'ET': '196.188.0.0/14',
5863 'EU': '2.16.0.0/13',
5864 'FI': '91.152.0.0/13',
5865 'FJ': '144.120.0.0/16',
53896ca5 5866 'FK': '80.73.208.0/21',
773f291d
S
5867 'FM': '119.252.112.0/20',
5868 'FO': '88.85.32.0/19',
5869 'FR': '90.0.0.0/9',
5870 'GA': '41.158.0.0/15',
5871 'GB': '25.0.0.0/8',
5872 'GD': '74.122.88.0/21',
5873 'GE': '31.146.0.0/16',
5874 'GF': '161.22.64.0/18',
5875 'GG': '62.68.160.0/19',
53896ca5
S
5876 'GH': '154.160.0.0/12',
5877 'GI': '95.164.0.0/16',
773f291d
S
5878 'GL': '88.83.0.0/19',
5879 'GM': '160.182.0.0/15',
5880 'GN': '197.149.192.0/18',
5881 'GP': '104.250.0.0/19',
5882 'GQ': '105.235.224.0/20',
5883 'GR': '94.64.0.0/13',
5884 'GT': '168.234.0.0/16',
5885 'GU': '168.123.0.0/16',
5886 'GW': '197.214.80.0/20',
5887 'GY': '181.41.64.0/18',
5888 'HK': '113.252.0.0/14',
5889 'HN': '181.210.0.0/16',
5890 'HR': '93.136.0.0/13',
5891 'HT': '148.102.128.0/17',
5892 'HU': '84.0.0.0/14',
5893 'ID': '39.192.0.0/10',
5894 'IE': '87.32.0.0/12',
5895 'IL': '79.176.0.0/13',
5896 'IM': '5.62.80.0/20',
5897 'IN': '117.192.0.0/10',
5898 'IO': '203.83.48.0/21',
5899 'IQ': '37.236.0.0/14',
5900 'IR': '2.176.0.0/12',
5901 'IS': '82.221.0.0/16',
5902 'IT': '79.0.0.0/10',
5903 'JE': '87.244.64.0/18',
5904 'JM': '72.27.0.0/17',
5905 'JO': '176.29.0.0/16',
53896ca5 5906 'JP': '133.0.0.0/8',
773f291d
S
5907 'KE': '105.48.0.0/12',
5908 'KG': '158.181.128.0/17',
5909 'KH': '36.37.128.0/17',
5910 'KI': '103.25.140.0/22',
5911 'KM': '197.255.224.0/20',
53896ca5 5912 'KN': '198.167.192.0/19',
773f291d
S
5913 'KP': '175.45.176.0/22',
5914 'KR': '175.192.0.0/10',
5915 'KW': '37.36.0.0/14',
5916 'KY': '64.96.0.0/15',
5917 'KZ': '2.72.0.0/13',
5918 'LA': '115.84.64.0/18',
5919 'LB': '178.135.0.0/16',
53896ca5 5920 'LC': '24.92.144.0/20',
773f291d
S
5921 'LI': '82.117.0.0/19',
5922 'LK': '112.134.0.0/15',
53896ca5 5923 'LR': '102.183.0.0/16',
773f291d
S
5924 'LS': '129.232.0.0/17',
5925 'LT': '78.56.0.0/13',
5926 'LU': '188.42.0.0/16',
5927 'LV': '46.109.0.0/16',
5928 'LY': '41.252.0.0/14',
5929 'MA': '105.128.0.0/11',
5930 'MC': '88.209.64.0/18',
5931 'MD': '37.246.0.0/16',
5932 'ME': '178.175.0.0/17',
5933 'MF': '74.112.232.0/21',
5934 'MG': '154.126.0.0/17',
5935 'MH': '117.103.88.0/21',
5936 'MK': '77.28.0.0/15',
5937 'ML': '154.118.128.0/18',
5938 'MM': '37.111.0.0/17',
5939 'MN': '49.0.128.0/17',
5940 'MO': '60.246.0.0/16',
5941 'MP': '202.88.64.0/20',
5942 'MQ': '109.203.224.0/19',
5943 'MR': '41.188.64.0/18',
5944 'MS': '208.90.112.0/22',
5945 'MT': '46.11.0.0/16',
5946 'MU': '105.16.0.0/12',
5947 'MV': '27.114.128.0/18',
53896ca5 5948 'MW': '102.70.0.0/15',
773f291d
S
5949 'MX': '187.192.0.0/11',
5950 'MY': '175.136.0.0/13',
5951 'MZ': '197.218.0.0/15',
5952 'NA': '41.182.0.0/16',
5953 'NC': '101.101.0.0/18',
5954 'NE': '197.214.0.0/18',
5955 'NF': '203.17.240.0/22',
5956 'NG': '105.112.0.0/12',
5957 'NI': '186.76.0.0/15',
5958 'NL': '145.96.0.0/11',
5959 'NO': '84.208.0.0/13',
5960 'NP': '36.252.0.0/15',
5961 'NR': '203.98.224.0/19',
5962 'NU': '49.156.48.0/22',
5963 'NZ': '49.224.0.0/14',
5964 'OM': '5.36.0.0/15',
5965 'PA': '186.72.0.0/15',
5966 'PE': '186.160.0.0/14',
5967 'PF': '123.50.64.0/18',
5968 'PG': '124.240.192.0/19',
5969 'PH': '49.144.0.0/13',
5970 'PK': '39.32.0.0/11',
5971 'PL': '83.0.0.0/11',
5972 'PM': '70.36.0.0/20',
5973 'PR': '66.50.0.0/16',
5974 'PS': '188.161.0.0/16',
5975 'PT': '85.240.0.0/13',
5976 'PW': '202.124.224.0/20',
5977 'PY': '181.120.0.0/14',
5978 'QA': '37.210.0.0/15',
53896ca5 5979 'RE': '102.35.0.0/16',
773f291d 5980 'RO': '79.112.0.0/13',
53896ca5 5981 'RS': '93.86.0.0/15',
773f291d 5982 'RU': '5.136.0.0/13',
53896ca5 5983 'RW': '41.186.0.0/16',
773f291d
S
5984 'SA': '188.48.0.0/13',
5985 'SB': '202.1.160.0/19',
5986 'SC': '154.192.0.0/11',
53896ca5 5987 'SD': '102.120.0.0/13',
773f291d 5988 'SE': '78.64.0.0/12',
53896ca5 5989 'SG': '8.128.0.0/10',
773f291d
S
5990 'SI': '188.196.0.0/14',
5991 'SK': '78.98.0.0/15',
53896ca5 5992 'SL': '102.143.0.0/17',
773f291d
S
5993 'SM': '89.186.32.0/19',
5994 'SN': '41.82.0.0/15',
53896ca5 5995 'SO': '154.115.192.0/18',
773f291d
S
5996 'SR': '186.179.128.0/17',
5997 'SS': '105.235.208.0/21',
5998 'ST': '197.159.160.0/19',
5999 'SV': '168.243.0.0/16',
6000 'SX': '190.102.0.0/20',
6001 'SY': '5.0.0.0/16',
6002 'SZ': '41.84.224.0/19',
6003 'TC': '65.255.48.0/20',
6004 'TD': '154.68.128.0/19',
6005 'TG': '196.168.0.0/14',
6006 'TH': '171.96.0.0/13',
6007 'TJ': '85.9.128.0/18',
6008 'TK': '27.96.24.0/21',
6009 'TL': '180.189.160.0/20',
6010 'TM': '95.85.96.0/19',
6011 'TN': '197.0.0.0/11',
6012 'TO': '175.176.144.0/21',
6013 'TR': '78.160.0.0/11',
6014 'TT': '186.44.0.0/15',
6015 'TV': '202.2.96.0/19',
6016 'TW': '120.96.0.0/11',
6017 'TZ': '156.156.0.0/14',
53896ca5
S
6018 'UA': '37.52.0.0/14',
6019 'UG': '102.80.0.0/13',
6020 'US': '6.0.0.0/8',
773f291d 6021 'UY': '167.56.0.0/13',
53896ca5 6022 'UZ': '84.54.64.0/18',
773f291d 6023 'VA': '212.77.0.0/19',
53896ca5 6024 'VC': '207.191.240.0/21',
773f291d 6025 'VE': '186.88.0.0/13',
53896ca5 6026 'VG': '66.81.192.0/20',
773f291d
S
6027 'VI': '146.226.0.0/16',
6028 'VN': '14.160.0.0/11',
6029 'VU': '202.80.32.0/20',
6030 'WF': '117.20.32.0/21',
6031 'WS': '202.4.32.0/19',
6032 'YE': '134.35.0.0/16',
6033 'YT': '41.242.116.0/22',
6034 'ZA': '41.0.0.0/11',
53896ca5
S
6035 'ZM': '102.144.0.0/13',
6036 'ZW': '102.177.192.0/18',
773f291d
S
6037 }
6038
6039 @classmethod
5f95927a
S
6040 def random_ipv4(cls, code_or_block):
6041 if len(code_or_block) == 2:
6042 block = cls._country_ip_map.get(code_or_block.upper())
6043 if not block:
6044 return None
6045 else:
6046 block = code_or_block
773f291d
S
6047 addr, preflen = block.split('/')
6048 addr_min = compat_struct_unpack('!L', socket.inet_aton(addr))[0]
6049 addr_max = addr_min | (0xffffffff >> int(preflen))
18a0defa 6050 return compat_str(socket.inet_ntoa(
4248dad9 6051 compat_struct_pack('!L', random.randint(addr_min, addr_max))))
773f291d
S
6052
6053
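# Illustrative usage sketch (not part of utils.py; assumes yt-dlp is importable as yt_dlp):
# random_ipv4 accepts either a two-letter country code or an explicit CIDR block.
from yt_dlp.utils import GeoUtils

ip = GeoUtils.random_ipv4('DE')             # random address inside 53.0.0.0/8
ip2 = GeoUtils.random_ipv4('192.0.2.0/24')  # or pass a CIDR block directly
assert GeoUtils.random_ipv4('ZZ') is None   # unknown country codes yield None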
91410c9b 6054class PerRequestProxyHandler(compat_urllib_request.ProxyHandler):
2461f79d
PH
6055 def __init__(self, proxies=None):
6056 # Set default handlers
6057 for type in ('http', 'https'):
6058 setattr(self, '%s_open' % type,
6059 lambda r, proxy='__noproxy__', type=type, meth=self.proxy_open:
6060 meth(r, proxy, type))
38e87f6c 6061 compat_urllib_request.ProxyHandler.__init__(self, proxies)
2461f79d 6062
91410c9b 6063 def proxy_open(self, req, proxy, type):
2461f79d 6064 req_proxy = req.headers.get('Ytdl-request-proxy')
91410c9b
PH
6065 if req_proxy is not None:
6066 proxy = req_proxy
2461f79d
PH
6067 del req.headers['Ytdl-request-proxy']
6068
6069 if proxy == '__noproxy__':
6070 return None # No Proxy
51fb4995 6071 if compat_urlparse.urlparse(proxy).scheme.lower() in ('socks', 'socks4', 'socks4a', 'socks5'):
71aff188 6072 req.add_header('Ytdl-socks-proxy', proxy)
7a5c1cfe 6073 # yt-dlp's http/https handlers wrap the socket with SOCKS themselves
71aff188 6074 return None
91410c9b
PH
6075 return compat_urllib_request.ProxyHandler.proxy_open(
6076 self, req, proxy, type)
5bc880b9
YCH
6077
6078
0a5445dd
YCH
6079# Both long_to_bytes and bytes_to_long are adapted from PyCrypto, which is
6080# released into Public Domain
6081# https://github.com/dlitz/pycrypto/blob/master/lib/Crypto/Util/number.py#L387
6082
6083def long_to_bytes(n, blocksize=0):
6084 """long_to_bytes(n:long, blocksize:int) : string
6085 Convert a long integer to a byte string.
6086
6087 If optional blocksize is given and greater than zero, pad the front of the
6088 byte string with binary zeros so that the length is a multiple of
6089 blocksize.
6090 """
6091 # after much testing, this algorithm was deemed to be the fastest
6092 s = b''
6093 n = int(n)
6094 while n > 0:
6095 s = compat_struct_pack('>I', n & 0xffffffff) + s
6096 n = n >> 32
6097 # strip off leading zeros
6098 for i in range(len(s)):
6099 if s[i] != b'\000'[0]:
6100 break
6101 else:
6102 # only happens when n == 0
6103 s = b'\000'
6104 i = 0
6105 s = s[i:]
6106 # add back some pad bytes. this could be done more efficiently w.r.t. the
6107 # de-padding being done above, but sigh...
6108 if blocksize > 0 and len(s) % blocksize:
6109 s = (blocksize - len(s) % blocksize) * b'\000' + s
6110 return s
6111
6112
6113def bytes_to_long(s):
6114 """bytes_to_long(string) : long
6115 Convert a byte string to a long integer.
6116
6117 This is (essentially) the inverse of long_to_bytes().
6118 """
6119 acc = 0
6120 length = len(s)
6121 if length % 4:
6122 extra = (4 - length % 4)
6123 s = b'\000' * extra + s
6124 length = length + extra
6125 for i in range(0, length, 4):
6126 acc = (acc << 32) + compat_struct_unpack('>I', s[i:i + 4])[0]
6127 return acc
6128
6129
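# Illustrative round-trip sketch (not part of utils.py; assumes yt-dlp is importable as yt_dlp):
from yt_dlp.utils import long_to_bytes, bytes_to_long

n = 0xdeadbeef
assert bytes_to_long(long_to_bytes(n)) == n
# blocksize pads the front with NUL bytes up to a multiple of blocksize
assert long_to_bytes(1, blocksize=4) == b'\x00\x00\x00\x01'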
5bc880b9
YCH
6130def ohdave_rsa_encrypt(data, exponent, modulus):
6131 '''
6132 Implement OHDave's RSA algorithm. See http://www.ohdave.com/rsa/
6133
6134 Input:
6135 data: data to encrypt, bytes-like object
6136 exponent, modulus: parameters e and N of the RSA algorithm, both integers
6137 Output: hex string of encrypted data
6138
6139 Limitation: supports one block encryption only
6140 '''
6141
6142 payload = int(binascii.hexlify(data[::-1]), 16)
6143 encrypted = pow(payload, exponent, modulus)
6144 return '%x' % encrypted
81bdc8fd
YCH
6145
6146
f48409c7
YCH
6147def pkcs1pad(data, length):
6148 """
6149 Padding input data with PKCS#1 scheme
6150
6151 @param {int[]} data input data
6152 @param {int} length target length
6153 @returns {int[]} padded data
6154 """
6155 if len(data) > length - 11:
6156 raise ValueError('Input data too long for PKCS#1 padding')
6157
6158 pseudo_random = [random.randint(0, 254) for _ in range(length - len(data) - 3)]
6159 return [0, 2] + pseudo_random + [0] + data
6160
6161
5eb6bdce 6162def encode_base_n(num, n, table=None):
59f898b7 6163 FULL_TABLE = '0123456789abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ'
59f898b7
YCH
6164 if not table:
6165 table = FULL_TABLE[:n]
6166
5eb6bdce
YCH
6167 if n > len(table):
6168 raise ValueError('base %d exceeds table length %d' % (n, len(table)))
6169
6170 if num == 0:
6171 return table[0]
6172
81bdc8fd
YCH
6173 ret = ''
6174 while num:
6175 ret = table[num % n] + ret
6176 num = num // n
6177 return ret
f52354a8
YCH
6178
6179
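# Illustrative sketch (not part of utils.py; assumes yt-dlp is importable as yt_dlp):
# encode_base_n converts a non-negative integer to a string in the given base,
# using '0-9a-zA-Z' (or a caller-supplied table) for the digits.
from yt_dlp.utils import encode_base_n

assert encode_base_n(255, 16) == 'ff'
assert encode_base_n(0, 36) == '0'
assert encode_base_n(5, 2, table='ab') == 'bab'   # custom digit table (5 = 0b101)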
6180def decode_packed_codes(code):
06b3fe29 6181 mobj = re.search(PACKED_CODES_RE, code)
a0566bbf 6182 obfuscated_code, base, count, symbols = mobj.groups()
f52354a8
YCH
6183 base = int(base)
6184 count = int(count)
6185 symbols = symbols.split('|')
6186 symbol_table = {}
6187
6188 while count:
6189 count -= 1
5eb6bdce 6190 base_n_count = encode_base_n(count, base)
f52354a8
YCH
6191 symbol_table[base_n_count] = symbols[count] or base_n_count
6192
6193 return re.sub(
6194 r'\b(\w+)\b', lambda mobj: symbol_table[mobj.group(0)],
a0566bbf 6195 obfuscated_code)
e154c651 6196
6197
1ced2221
S
6198def caesar(s, alphabet, shift):
6199 if shift == 0:
6200 return s
6201 l = len(alphabet)
6202 return ''.join(
6203 alphabet[(alphabet.index(c) + shift) % l] if c in alphabet else c
6204 for c in s)
6205
6206
6207def rot47(s):
6208 return caesar(s, r'''!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~''', 47)
6209
6210
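# Illustrative sketch (not part of utils.py; assumes yt-dlp is importable as yt_dlp):
# caesar() shifts characters within the given alphabet; rot47() is the classic
# self-inverse variant over the printable ASCII range, so applying it twice
# restores the original string.
from yt_dlp.utils import caesar, rot47

assert caesar('abc', 'abcdefghijklmnopqrstuvwxyz', 1) == 'bcd'
assert rot47(rot47('some obfuscated payload')) == 'some obfuscated payload'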
e154c651 6211def parse_m3u8_attributes(attrib):
6212 info = {}
6213 for (key, val) in re.findall(r'(?P<key>[A-Z0-9-]+)=(?P<val>"[^"]+"|[^",]+)(?:,|$)', attrib):
6214 if val.startswith('"'):
6215 val = val[1:-1]
6216 info[key] = val
6217 return info
1143535d
YCH
6218
6219
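# Illustrative sketch (not part of utils.py; assumes yt-dlp is importable as yt_dlp):
# parse_m3u8_attributes splits an HLS attribute list into a dict, stripping the
# surrounding quotes from quoted values.
from yt_dlp.utils import parse_m3u8_attributes

attrs = parse_m3u8_attributes('BANDWIDTH=630000,RESOLUTION=640x360,CODECS="avc1.42c01e,mp4a.40.2"')
assert attrs == {
    'BANDWIDTH': '630000',
    'RESOLUTION': '640x360',
    'CODECS': 'avc1.42c01e,mp4a.40.2',
}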
6220def urshift(val, n):
6221 return val >> n if val >= 0 else (val + 0x100000000) >> n
d3f8e038
YCH
6222
6223
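# Illustrative sketch (not part of utils.py; assumes yt-dlp is importable as yt_dlp):
# urshift emulates a 32-bit unsigned right shift (JavaScript's >>> operator)
# for values that may be negative in Python.
from yt_dlp.utils import urshift

assert urshift(16, 2) == 4
assert urshift(-1, 1) == 0x7fffffff   # -1 is treated as 0xffffffff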
6224# Based on png2str() written by @gdkchan and improved by @yokrysty
067aa17e 6225# Originally posted at https://github.com/ytdl-org/youtube-dl/issues/9706
d3f8e038
YCH
6226def decode_png(png_data):
6227 # Reference: https://www.w3.org/TR/PNG/
6228 header = png_data[8:]
6229
6230 if png_data[:8] != b'\x89PNG\x0d\x0a\x1a\x0a' or header[4:8] != b'IHDR':
6231 raise IOError('Not a valid PNG file.')
6232
6233 int_map = {1: '>B', 2: '>H', 4: '>I'}
6234 unpack_integer = lambda x: compat_struct_unpack(int_map[len(x)], x)[0]
6235
6236 chunks = []
6237
6238 while header:
6239 length = unpack_integer(header[:4])
6240 header = header[4:]
6241
6242 chunk_type = header[:4]
6243 header = header[4:]
6244
6245 chunk_data = header[:length]
6246 header = header[length:]
6247
6248 header = header[4:] # Skip CRC
6249
6250 chunks.append({
6251 'type': chunk_type,
6252 'length': length,
6253 'data': chunk_data
6254 })
6255
6256 ihdr = chunks[0]['data']
6257
6258 width = unpack_integer(ihdr[:4])
6259 height = unpack_integer(ihdr[4:8])
6260
6261 idat = b''
6262
6263 for chunk in chunks:
6264 if chunk['type'] == b'IDAT':
6265 idat += chunk['data']
6266
6267 if not idat:
6268 raise IOError('Unable to read PNG data.')
6269
6270 decompressed_data = bytearray(zlib.decompress(idat))
6271
6272 stride = width * 3
6273 pixels = []
6274
6275 def _get_pixel(idx):
6276 x = idx % stride
6277 y = idx // stride
6278 return pixels[y][x]
6279
6280 for y in range(height):
6281 basePos = y * (1 + stride)
6282 filter_type = decompressed_data[basePos]
6283
6284 current_row = []
6285
6286 pixels.append(current_row)
6287
6288 for x in range(stride):
6289 color = decompressed_data[1 + basePos + x]
6290 basex = y * stride + x
6291 left = 0
6292 up = 0
6293
6294 if x > 2:
6295 left = _get_pixel(basex - 3)
6296 if y > 0:
6297 up = _get_pixel(basex - stride)
6298
6299 if filter_type == 1: # Sub
6300 color = (color + left) & 0xff
6301 elif filter_type == 2: # Up
6302 color = (color + up) & 0xff
6303 elif filter_type == 3: # Average
6304 color = (color + ((left + up) >> 1)) & 0xff
6305 elif filter_type == 4: # Paeth
6306 a = left
6307 b = up
6308 c = 0
6309
6310 if x > 2 and y > 0:
6311 c = _get_pixel(basex - stride - 3)
6312
6313 p = a + b - c
6314
6315 pa = abs(p - a)
6316 pb = abs(p - b)
6317 pc = abs(p - c)
6318
6319 if pa <= pb and pa <= pc:
6320 color = (color + a) & 0xff
6321 elif pb <= pc:
6322 color = (color + b) & 0xff
6323 else:
6324 color = (color + c) & 0xff
6325
6326 current_row.append(color)
6327
6328 return width, height, pixels
efa97bdc
YCH
6329
6330
6331def write_xattr(path, key, value):
6332 # This mess below finds the best xattr tool for the job
6333 try:
6334 # try the pyxattr module...
6335 import xattr
6336
53a7e3d2
YCH
6337 if hasattr(xattr, 'set'): # pyxattr
6338 # Unicode arguments are not supported in python-pyxattr until
6339 # version 0.5.0
067aa17e 6340 # See https://github.com/ytdl-org/youtube-dl/issues/5498
53a7e3d2
YCH
6341 pyxattr_required_version = '0.5.0'
6342 if version_tuple(xattr.__version__) < version_tuple(pyxattr_required_version):
6343 # TODO: fallback to CLI tools
6344 raise XAttrUnavailableError(
6345 'python-pyxattr is detected but is too old. '
7a5c1cfe 6346 'yt-dlp requires %s or above while your version is %s. '
53a7e3d2
YCH
6347 'Falling back to other xattr implementations' % (
6348 pyxattr_required_version, xattr.__version__))
6349
6350 setxattr = xattr.set
6351 else: # xattr
6352 setxattr = xattr.setxattr
efa97bdc
YCH
6353
6354 try:
53a7e3d2 6355 setxattr(path, key, value)
efa97bdc
YCH
6356 except EnvironmentError as e:
6357 raise XAttrMetadataError(e.errno, e.strerror)
6358
6359 except ImportError:
6360 if compat_os_name == 'nt':
6361 # Write xattrs to NTFS Alternate Data Streams:
6362 # http://en.wikipedia.org/wiki/NTFS#Alternate_data_streams_.28ADS.29
6363 assert ':' not in key
6364 assert os.path.exists(path)
6365
6366 ads_fn = path + ':' + key
6367 try:
6368 with open(ads_fn, 'wb') as f:
6369 f.write(value)
6370 except EnvironmentError as e:
6371 raise XAttrMetadataError(e.errno, e.strerror)
6372 else:
6373 user_has_setfattr = check_executable('setfattr', ['--version'])
6374 user_has_xattr = check_executable('xattr', ['-h'])
6375
6376 if user_has_setfattr or user_has_xattr:
6377
6378 value = value.decode('utf-8')
6379 if user_has_setfattr:
6380 executable = 'setfattr'
6381 opts = ['-n', key, '-v', value]
6382 elif user_has_xattr:
6383 executable = 'xattr'
6384 opts = ['-w', key, value]
6385
3089bc74
S
6386 cmd = ([encodeFilename(executable, True)]
6387 + [encodeArgument(o) for o in opts]
6388 + [encodeFilename(path, True)])
efa97bdc
YCH
6389
6390 try:
d3c93ec2 6391 p = Popen(
efa97bdc
YCH
6392 cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE, stdin=subprocess.PIPE)
6393 except EnvironmentError as e:
6394 raise XAttrMetadataError(e.errno, e.strerror)
d3c93ec2 6395 stdout, stderr = p.communicate_or_kill()
efa97bdc
YCH
6396 stderr = stderr.decode('utf-8', 'replace')
6397 if p.returncode != 0:
6398 raise XAttrMetadataError(p.returncode, stderr)
6399
6400 else:
6401 # On Unix, but couldn't find pyxattr, setfattr, or xattr.
6402 if sys.platform.startswith('linux'):
6403 raise XAttrUnavailableError(
6404 "Couldn't find a tool to set the xattrs. "
6405 "Install either the python 'pyxattr' or 'xattr' "
6406 "modules, or the GNU 'attr' package "
6407 "(which contains the 'setfattr' tool).")
6408 else:
6409 raise XAttrUnavailableError(
6410 "Couldn't find a tool to set the xattrs. "
6411 "Install either the python 'xattr' module, "
6412 "or the 'xattr' binary.")
0c265486
YCH
6413
6414
6415def random_birthday(year_field, month_field, day_field):
aa374bc7
AS
6416 start_date = datetime.date(1950, 1, 1)
6417 end_date = datetime.date(1995, 12, 31)
6418 offset = random.randint(0, (end_date - start_date).days)
6419 random_date = start_date + datetime.timedelta(offset)
0c265486 6420 return {
aa374bc7
AS
6421 year_field: str(random_date.year),
6422 month_field: str(random_date.month),
6423 day_field: str(random_date.day),
0c265486 6424 }
732044af 6425
c76eb41b 6426
732044af 6427# Templates for internet shortcut files, which are plain text files.
6428DOT_URL_LINK_TEMPLATE = '''
6429[InternetShortcut]
6430URL=%(url)s
6431'''.lstrip()
6432
6433DOT_WEBLOC_LINK_TEMPLATE = '''
6434<?xml version="1.0" encoding="UTF-8"?>
6435<!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
6436<plist version="1.0">
6437<dict>
6438\t<key>URL</key>
6439\t<string>%(url)s</string>
6440</dict>
6441</plist>
6442'''.lstrip()
6443
6444DOT_DESKTOP_LINK_TEMPLATE = '''
6445[Desktop Entry]
6446Encoding=UTF-8
6447Name=%(filename)s
6448Type=Link
6449URL=%(url)s
6450Icon=text-html
6451'''.lstrip()
6452
08438d2c 6453LINK_TEMPLATES = {
6454 'url': DOT_URL_LINK_TEMPLATE,
6455 'desktop': DOT_DESKTOP_LINK_TEMPLATE,
6456 'webloc': DOT_WEBLOC_LINK_TEMPLATE,
6457}
6458
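# Illustrative sketch (not part of utils.py; assumes yt-dlp is importable as yt_dlp):
# Each template is filled with old-style % formatting; e.g. a .url file body:
from yt_dlp.utils import DOT_URL_LINK_TEMPLATE

body = DOT_URL_LINK_TEMPLATE % {'url': 'https://example.com/watch?v=xyz'}
assert body == '[InternetShortcut]\nURL=https://example.com/watch?v=xyz\n'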
732044af 6459
6460def iri_to_uri(iri):
6461 """
6462 Converts an IRI (Internationalized Resource Identifier, allowing Unicode characters) to a URI (Uniform Resource Identifier, ASCII-only).
6463
6464 The function doesn't add an additional layer of escaping; e.g., it doesn't escape `%3C` as `%253C`. Instead, it percent-escapes characters with an underlying UTF-8 encoding *besides* those already escaped, leaving the URI intact.
6465 """
6466
6467 iri_parts = compat_urllib_parse_urlparse(iri)
6468
6469 if '[' in iri_parts.netloc:
6470 raise ValueError('IPv6 URIs are not yet supported.')
6471 # Querying `.netloc`, when there's only one bracket, also raises a ValueError.
6472
6473 # The `safe` argument values, that the following code uses, contain the characters that should not be percent-encoded. Everything else but letters, digits and '_.-' will be percent-encoded with an underlying UTF-8 encoding. Everything already percent-encoded will be left as is.
6474
6475 net_location = ''
6476 if iri_parts.username:
6477 net_location += compat_urllib_parse_quote(iri_parts.username, safe=r"!$%&'()*+,~")
6478 if iri_parts.password is not None:
6479 net_location += ':' + compat_urllib_parse_quote(iri_parts.password, safe=r"!$%&'()*+,~")
6480 net_location += '@'
6481
6482 net_location += iri_parts.hostname.encode('idna').decode('utf-8') # Punycode for Unicode hostnames.
6483 # The 'idna' encoding produces ASCII text.
6484 if iri_parts.port is not None and iri_parts.port != 80:
6485 net_location += ':' + str(iri_parts.port)
6486
6487 return compat_urllib_parse_urlunparse(
6488 (iri_parts.scheme,
6489 net_location,
6490
6491 compat_urllib_parse_quote_plus(iri_parts.path, safe=r"!$%&'()*+,/:;=@|~"),
6492
6493 # Unsure about the `safe` argument, since this is a legacy way of handling parameters.
6494 compat_urllib_parse_quote_plus(iri_parts.params, safe=r"!$%&'()*+,/:;=@|~"),
6495
6496 # Not totally sure about the `safe` argument, since the source does not explicitly mention the query URI component.
6497 compat_urllib_parse_quote_plus(iri_parts.query, safe=r"!$%&'()*+,/:;=?@{|}~"),
6498
6499 compat_urllib_parse_quote_plus(iri_parts.fragment, safe=r"!#$%&'()*+,/:;=?@{|}~")))
6500
6501 # Source for `safe` arguments: https://url.spec.whatwg.org/#percent-encoded-bytes.
6502
6503
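# Illustrative sketch (not part of utils.py; assumes yt-dlp is importable as yt_dlp):
# Non-ASCII path characters are percent-encoded as UTF-8 and the hostname is
# converted with IDNA; already-escaped sequences are left untouched.
from yt_dlp.utils import iri_to_uri

assert iri_to_uri('http://example.com/ä') == 'http://example.com/%C3%A4'
assert iri_to_uri('http://example.com/%C3%A4') == 'http://example.com/%C3%A4'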
6504def to_high_limit_path(path):
6505 if sys.platform in ['win32', 'cygwin']:
6506 # Work around MAX_PATH limitation on Windows. The maximum allowed length for the individual path segments may still be quite limited.
6507 return r'\\?\ '.rstrip() + os.path.abspath(path)
6508
6509 return path
76d321f6 6510
c76eb41b 6511
b868936c 6512def format_field(obj, field=None, template='%s', ignore=(None, ''), default='', func=None):
6513 if field is None:
6514 val = obj if obj is not None else default
6515 else:
6516 val = obj.get(field, default)
76d321f6 6517 if func and val not in ignore:
6518 val = func(val)
6519 return template % val if val not in ignore else default
00dd0cd5 6520
6521
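# Illustrative sketch (not part of utils.py; assumes yt-dlp is importable as yt_dlp):
# format_field applies a printf-style template only when the value is present,
# falling back to `default` for None/'' (or whatever `ignore` contains).
from yt_dlp.utils import format_field

assert format_field({'height': 1080}, 'height', '%sp') == '1080p'
assert format_field({'height': None}, 'height', '%sp', default='unknown') == 'unknown'
assert format_field(42, template='%03d') == '042'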
6522def clean_podcast_url(url):
6523 return re.sub(r'''(?x)
6524 (?:
6525 (?:
6526 chtbl\.com/track|
6527 media\.blubrry\.com| # https://create.blubrry.com/resources/podcast-media-download-statistics/getting-started/
6528 play\.podtrac\.com
6529 )/[^/]+|
6530 (?:dts|www)\.podtrac\.com/(?:pts/)?redirect\.[0-9a-z]{3,4}| # http://analytics.podtrac.com/how-to-measure
6531 flex\.acast\.com|
6532 pd(?:
6533 cn\.co| # https://podcorn.com/analytics-prefix/
6534 st\.fm # https://podsights.com/docs/
6535 )/e
6536 )/''', '', url)
ffcb8191
THD
6537
6538
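# Illustrative sketch (not part of utils.py; assumes yt-dlp is importable as yt_dlp;
# the podtrac-prefixed URL below is a made-up example):
# clean_podcast_url strips well-known tracking/analytics prefixes from podcast
# media URLs, leaving the direct file URL.
from yt_dlp.utils import clean_podcast_url

assert clean_podcast_url(
    'https://www.podtrac.com/pts/redirect.mp3/traffic.example.com/episode.mp3'
) == 'https://traffic.example.com/episode.mp3'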
6539_HEX_TABLE = '0123456789abcdef'
6540
6541
6542def random_uuidv4():
6543 return re.sub(r'[xy]', lambda x: _HEX_TABLE[random.randint(0, 15)], 'xxxxxxxx-xxxx-4xxx-yxxx-xxxxxxxxxxxx')
0202b52a 6544
6545
6546def make_dir(path, to_screen=None):
6547 try:
6548 dn = os.path.dirname(path)
6549 if dn and not os.path.exists(dn):
6550 os.makedirs(dn)
6551 return True
6552 except (OSError, IOError) as err:
6553 if callable(to_screen):
6554 to_screen('unable to create directory ' + error_to_compat_str(err))
6555 return False
f74980cb 6556
6557
6558def get_executable_path():
c552ae88 6559 from zipimport import zipimporter
6560 if hasattr(sys, 'frozen'): # Running from PyInstaller
6561 path = os.path.dirname(sys.executable)
6562 elif isinstance(globals().get('__loader__'), zipimporter): # Running from ZIP
6563 path = os.path.join(os.path.dirname(__file__), '../..')
6564 else:
6565 path = os.path.join(os.path.dirname(__file__), '..')
f74980cb 6566 return os.path.abspath(path)
6567
6568
2f567473 6569def load_plugins(name, suffix, namespace):
3ae5e797 6570 classes = {}
f74980cb 6571 try:
019a94f7
ÁS
6572 plugins_spec = importlib.util.spec_from_file_location(
6573 name, os.path.join(get_executable_path(), 'ytdlp_plugins', name, '__init__.py'))
6574 plugins = importlib.util.module_from_spec(plugins_spec)
6575 sys.modules[plugins_spec.name] = plugins
6576 plugins_spec.loader.exec_module(plugins)
f74980cb 6577 for name in dir(plugins):
2f567473 6578 if name in namespace:
6579 continue
6580 if not name.endswith(suffix):
f74980cb 6581 continue
6582 klass = getattr(plugins, name)
3ae5e797 6583 classes[name] = namespace[name] = klass
019a94f7 6584 except FileNotFoundError:
f74980cb 6585 pass
f74980cb 6586 return classes
06167fbb 6587
6588
325ebc17 6589def traverse_obj(
352d63fd 6590 obj, *path_list, default=None, expected_type=None, get_all=True,
325ebc17 6591 casesense=True, is_user_input=False, traverse_string=False):
324ad820 6592 ''' Traverse nested list/dict/tuple
8f334380 6593 @param path_list A list of paths which are checked one by one.
6594 Each path is a list of keys where each key is a string,
1797b073 6595 a function, a tuple of strings/None or "...".
2614f646 6596 When a fuction is given, it takes the key as argument and
6597 returns whether the key matches or not. When a tuple is given,
8f334380 6598 all the keys given in the tuple are traversed, and
6599 "..." traverses all the keys in the object
1797b073 6600 "None" returns the object without traversal
325ebc17 6601 @param default Default value to return
352d63fd 6602 @param expected_type Only accept final value of this type (Can also be any callable)
6603 @param get_all Return all the values obtained from a path or only the first one
324ad820 6604 @param casesense Whether to consider dictionary keys as case sensitive
6605 @param is_user_input Whether the keys are generated from user input. If True,
6606 strings are converted to int/slice if necessary
6607 @param traverse_string Whether to traverse inside strings. If True, any
6608 non-compatible object will also be converted into a string
8f334380 6609 # TODO: Write tests
324ad820 6610 '''
325ebc17 6611 if not casesense:
dbf5416a 6612 _lower = lambda k: (k.lower() if isinstance(k, str) else k)
8f334380 6613 path_list = (map(_lower, variadic(path)) for path in path_list)
6614
6615 def _traverse_obj(obj, path, _current_depth=0):
6616 nonlocal depth
6617 path = tuple(variadic(path))
6618 for i, key in enumerate(path):
1797b073 6619 if None in (key, obj):
6620 return obj
8f334380 6621 if isinstance(key, (list, tuple)):
6622 obj = [_traverse_obj(obj, sub_key, _current_depth) for sub_key in key]
6623 key = ...
6624 if key is ...:
6625 obj = (obj.values() if isinstance(obj, dict)
6626 else obj if isinstance(obj, (list, tuple, LazyList))
6627 else str(obj) if traverse_string else [])
6628 _current_depth += 1
6629 depth = max(depth, _current_depth)
6630 return [_traverse_obj(inner_obj, path[i + 1:], _current_depth) for inner_obj in obj]
2614f646 6631 elif callable(key):
6632 if isinstance(obj, (list, tuple, LazyList)):
6633 obj = enumerate(obj)
6634 elif isinstance(obj, dict):
6635 obj = obj.items()
6636 else:
6637 if not traverse_string:
6638 return None
6639 obj = str(obj)
6640 _current_depth += 1
6641 depth = max(depth, _current_depth)
6642 return [_traverse_obj(v, path[i + 1:], _current_depth) for k, v in obj if key(k)]
575e17a1 6643 elif isinstance(obj, dict) and not (is_user_input and key == ':'):
325ebc17 6644 obj = (obj.get(key) if casesense or (key in obj)
6645 else next((v for k, v in obj.items() if _lower(k) == key), None))
6646 else:
6647 if is_user_input:
6648 key = (int_or_none(key) if ':' not in key
6649 else slice(*map(int_or_none, key.split(':'))))
8f334380 6650 if key == slice(None):
575e17a1 6651 return _traverse_obj(obj, (..., *path[i + 1:]), _current_depth)
325ebc17 6652 if not isinstance(key, (int, slice)):
9fea350f 6653 return None
8f334380 6654 if not isinstance(obj, (list, tuple, LazyList)):
325ebc17 6655 if not traverse_string:
6656 return None
6657 obj = str(obj)
6658 try:
6659 obj = obj[key]
6660 except IndexError:
324ad820 6661 return None
325ebc17 6662 return obj
6663
352d63fd 6664 if isinstance(expected_type, type):
6665 type_test = lambda val: val if isinstance(val, expected_type) else None
6666 elif expected_type is not None:
6667 type_test = expected_type
6668 else:
6669 type_test = lambda val: val
6670
8f334380 6671 for path in path_list:
6672 depth = 0
6673 val = _traverse_obj(obj, path)
325ebc17 6674 if val is not None:
8f334380 6675 if depth:
6676 for _ in range(depth - 1):
6586bca9 6677 val = itertools.chain.from_iterable(v for v in val if v is not None)
352d63fd 6678 val = [v for v in map(type_test, val) if v is not None]
8f334380 6679 if val:
352d63fd 6680 return val if get_all else val[0]
6681 else:
6682 val = type_test(val)
6683 if val is not None:
8f334380 6684 return val
325ebc17 6685 return default
324ad820 6686
6687
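# Illustrative sketch (not part of utils.py; assumes yt-dlp is importable as yt_dlp):
# traverse_obj walks nested dicts/lists along one or more alternative paths;
# `...` branches over every element, and get_all=True collects all matches.
from yt_dlp.utils import traverse_obj

data = {'formats': [{'url': 'https://a.example/1'}, {'height': 720}]}
assert traverse_obj(data, ('formats', 0, 'url')) == 'https://a.example/1'
assert traverse_obj(data, ('formats', ..., 'url')) == ['https://a.example/1']
assert traverse_obj(data, ('missing',), ('formats', 1, 'height')) == 720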
6688def traverse_dict(dictn, keys, casesense=True):
ee8dd27a 6689 write_string('DeprecationWarning: yt_dlp.utils.traverse_dict is deprecated '
6690 'and may be removed in a future version. Use yt_dlp.utils.traverse_obj instead')
6691 return traverse_obj(dictn, keys, casesense=casesense, is_user_input=True, traverse_string=True)
6606817a 6692
6693
4b4b7f74 6694def variadic(x, allowed_types=(str, bytes, dict)):
cb89cfc1 6695 return x if isinstance(x, collections.abc.Iterable) and not isinstance(x, allowed_types) else (x,)
bd50a52b
THD
6696
6697
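# Illustrative sketch (not part of utils.py; assumes yt-dlp is importable as yt_dlp):
# variadic wraps scalars (and str/bytes/dict, which are iterable but usually
# meant as single values) into a tuple, while passing real sequences through.
from yt_dlp.utils import variadic

assert variadic('single') == ('single',)
assert variadic(['a', 'b']) == ['a', 'b']
assert variadic(42) == (42,)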
49fa4d9a
N
6698# create a JSON Web Signature (jws) with HS256 algorithm
6699# the resulting format is in JWS Compact Serialization
6700# implemented following JWT https://www.rfc-editor.org/rfc/rfc7519.html
6701# implemented following JWS https://www.rfc-editor.org/rfc/rfc7515.html
6702def jwt_encode_hs256(payload_data, key, headers={}):
6703 header_data = {
6704 'alg': 'HS256',
6705 'typ': 'JWT',
6706 }
6707 if headers:
6708 header_data.update(headers)
6709 header_b64 = base64.b64encode(json.dumps(header_data).encode('utf-8'))
6710 payload_b64 = base64.b64encode(json.dumps(payload_data).encode('utf-8'))
6711 h = hmac.new(key.encode('utf-8'), header_b64 + b'.' + payload_b64, hashlib.sha256)
6712 signature_b64 = base64.b64encode(h.digest())
6713 token = header_b64 + b'.' + payload_b64 + b'.' + signature_b64
6714 return token
819e0531 6715
6716
16b0d7e6 6717# Can be extended in the future to verify the signature and parse the header, returning the algorithm used if it's not HS256
6718def jwt_decode_hs256(jwt):
6719 header_b64, payload_b64, signature_b64 = jwt.split('.')
6720 payload_data = json.loads(base64.urlsafe_b64decode(payload_b64))
6721 return payload_data
6722
6723
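# Illustrative sketch (not part of utils.py; assumes yt-dlp is importable as yt_dlp;
# 'secret-key' and the payload are made-up values):
# jwt_encode_hs256 produces a compact JWS token (header.payload.signature) as
# bytes; jwt_decode_hs256 extracts the payload without verifying the signature.
from yt_dlp.utils import jwt_encode_hs256, jwt_decode_hs256

token = jwt_encode_hs256({'uid': 123}, 'secret-key')
assert isinstance(token, bytes) and token.count(b'.') == 2
assert jwt_decode_hs256(token.decode())['uid'] == 123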
819e0531 6724def supports_terminal_sequences(stream):
6725 if compat_os_name == 'nt':
e3c7d495 6726 from .compat import WINDOWS_VT_MODE # Must be imported locally
6727 if not WINDOWS_VT_MODE or get_windows_version() < (10, 0, 10586):
819e0531 6728 return False
6729 elif not os.getenv('TERM'):
6730 return False
6731 try:
6732 return stream.isatty()
6733 except BaseException:
6734 return False
6735
6736
ec11a9f4 6737_terminal_sequences_re = re.compile('\033\\[[^m]+m')
6738
6739
6740def remove_terminal_sequences(string):
6741 return _terminal_sequences_re.sub('', string)
6742
6743
6744def number_of_digits(number):
6745 return len('%d' % number)
34921b43 6746
6747
6748def join_nonempty(*values, delim='-', from_dict=None):
6749 if from_dict is not None:
c586f9e8 6750 values = map(from_dict.get, values)
34921b43 6751 return delim.join(map(str, filter(None, values)))
06e57990 6752
6753
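# Illustrative sketch (not part of utils.py; assumes yt-dlp is importable as yt_dlp):
# join_nonempty joins only the truthy values, optionally pulling them from a dict.
from yt_dlp.utils import join_nonempty

assert join_nonempty('mp4', None, '1080p', '', 'dash') == 'mp4-1080p-dash'
assert join_nonempty('width', 'height', from_dict={'width': 1920, 'height': 1080}, delim='x') == '1920x1080'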
6754class Config:
6755 own_args = None
6756 filename = None
6757 __initialized = False
6758
6759 def __init__(self, parser, label=None):
6760 self._parser, self.label = parser, label
6761 self._loaded_paths, self.configs = set(), []
6762
6763 def init(self, args=None, filename=None):
6764 assert not self.__initialized
6765 if filename:
6766 location = os.path.realpath(filename)
6767 if location in self._loaded_paths:
6768 return False
6769 self._loaded_paths.add(location)
6770
6771 self.__initialized = True
6772 self.own_args, self.filename = args, filename
6773 for location in self._parser.parse_args(args)[0].config_locations or []:
6774 location = compat_expanduser(location)
6775 if os.path.isdir(location):
6776 location = os.path.join(location, 'yt-dlp.conf')
6777 if not os.path.exists(location):
6778 self._parser.error(f'config location {location} does not exist')
6779 self.append_config(self.read_file(location), location)
6780 return True
6781
6782 def __str__(self):
6783 label = join_nonempty(
6784 self.label, 'config', f'"{self.filename}"' if self.filename else '',
6785 delim=' ')
6786 return join_nonempty(
6787 self.own_args is not None and f'{label[0].upper()}{label[1:]}: {self.hide_login_info(self.own_args)}',
6788 *(f'\n{c}'.replace('\n', '\n| ')[1:] for c in self.configs),
6789 delim='\n')
6790
6791 @staticmethod
6792 def read_file(filename, default=[]):
6793 try:
6794 optionf = open(filename)
6795 except IOError:
6796 return default # silently skip if file is not present
6797 try:
6798 # FIXME: https://github.com/ytdl-org/youtube-dl/commit/dfe5fa49aed02cf36ba9f743b11b0903554b5e56
6799 contents = optionf.read()
6800 if sys.version_info < (3,):
6801 contents = contents.decode(preferredencoding())
6802 res = compat_shlex_split(contents, comments=True)
6803 finally:
6804 optionf.close()
6805 return res
6806
6807 @staticmethod
6808 def hide_login_info(opts):
6809 PRIVATE_OPTS = set(['-p', '--password', '-u', '--username', '--video-password', '--ap-password', '--ap-username'])
6810 eqre = re.compile('^(?P<key>' + ('|'.join(re.escape(po) for po in PRIVATE_OPTS)) + ')=.+$')
6811
6812 def _scrub_eq(o):
6813 m = eqre.match(o)
6814 if m:
6815 return m.group('key') + '=PRIVATE'
6816 else:
6817 return o
6818
6819 opts = list(map(_scrub_eq, opts))
6820 for idx, opt in enumerate(opts):
6821 if opt in PRIVATE_OPTS and idx + 1 < len(opts):
6822 opts[idx + 1] = 'PRIVATE'
6823 return opts
6824
6825 def append_config(self, *args, label=None):
6826 config = type(self)(self._parser, label)
6827 config._loaded_paths = self._loaded_paths
6828 if config.init(*args):
6829 self.configs.append(config)
6830
6831 @property
6832 def all_args(self):
6833 for config in reversed(self.configs):
6834 yield from config.all_args
6835 yield from self.own_args or []
6836
6837 def parse_args(self):
6838 return self._parser.parse_args(list(self.all_args))
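# Illustrative sketch (not part of utils.py; assumes yt-dlp is importable as yt_dlp;
# the username/password values are made up):
# Config.hide_login_info scrubs credentials from an argument list before it is
# echoed back, e.g. in verbose output.
from yt_dlp.utils import Config

assert Config.hide_login_info(['-u', 'name', '--password=hunter2', '-f', 'best']) == \
    ['-u', 'PRIVATE', '--password=PRIVATE', '-f', 'best']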