#!/usr/bin/env python3
# coding: utf-8

from __future__ import unicode_literals

import base64
import binascii
import calendar
import codecs
import collections
import contextlib
import ctypes
import datetime
import email.utils
import email.header
import errno
import functools
import gzip
import hashlib
import hmac
import importlib.util
import io
import itertools
import json
import locale
import math
import operator
import os
import platform
import random
import re
import socket
import ssl
import subprocess
import sys
import tempfile
import time
import traceback
import xml.etree.ElementTree
import zlib
import mimetypes

from .compat import (
    compat_HTMLParseError,
    compat_HTMLParser,
    compat_HTTPError,
    compat_basestring,
    compat_chr,
    compat_cookiejar,
    compat_ctypes_WINFUNCTYPE,
    compat_etree_fromstring,
    compat_expanduser,
    compat_html_entities,
    compat_html_entities_html5,
    compat_http_client,
    compat_integer_types,
    compat_numeric_types,
    compat_kwargs,
    compat_os_name,
    compat_parse_qs,
    compat_shlex_split,
    compat_shlex_quote,
    compat_str,
    compat_struct_pack,
    compat_struct_unpack,
    compat_urllib_error,
    compat_urllib_parse,
    compat_urllib_parse_urlencode,
    compat_urllib_parse_urlparse,
    compat_urllib_parse_urlunparse,
    compat_urllib_parse_quote,
    compat_urllib_parse_quote_plus,
    compat_urllib_parse_unquote_plus,
    compat_urllib_request,
    compat_urlparse,
    compat_xpath,
)

from .socks import (
    ProxyType,
    sockssocket,
)


def register_socks_protocols():
    # "Register" SOCKS protocols
    # In Python < 2.6.5, urlsplit() suffers from bug https://bugs.python.org/issue7904
    # URLs with protocols not in urlparse.uses_netloc are not handled correctly
    for scheme in ('socks', 'socks4', 'socks4a', 'socks5'):
        if scheme not in compat_urlparse.uses_netloc:
            compat_urlparse.uses_netloc.append(scheme)


# This is not clearly defined otherwise
compiled_regex_type = type(re.compile(''))


def random_user_agent():
    _USER_AGENT_TPL = 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/%s Safari/537.36'
    _CHROME_VERSIONS = (
        '74.0.3729.129',
102 '76.0.3780.3',
103 '76.0.3780.2',
104 '74.0.3729.128',
105 '76.0.3780.1',
106 '76.0.3780.0',
107 '75.0.3770.15',
108 '74.0.3729.127',
109 '74.0.3729.126',
110 '76.0.3779.1',
111 '76.0.3779.0',
112 '75.0.3770.14',
113 '74.0.3729.125',
114 '76.0.3778.1',
115 '76.0.3778.0',
116 '75.0.3770.13',
117 '74.0.3729.124',
118 '74.0.3729.123',
119 '73.0.3683.121',
120 '76.0.3777.1',
121 '76.0.3777.0',
122 '75.0.3770.12',
123 '74.0.3729.122',
124 '76.0.3776.4',
125 '75.0.3770.11',
126 '74.0.3729.121',
127 '76.0.3776.3',
128 '76.0.3776.2',
129 '73.0.3683.120',
130 '74.0.3729.120',
131 '74.0.3729.119',
132 '74.0.3729.118',
133 '76.0.3776.1',
134 '76.0.3776.0',
135 '76.0.3775.5',
136 '75.0.3770.10',
137 '74.0.3729.117',
138 '76.0.3775.4',
139 '76.0.3775.3',
140 '74.0.3729.116',
141 '75.0.3770.9',
142 '76.0.3775.2',
143 '76.0.3775.1',
144 '76.0.3775.0',
145 '75.0.3770.8',
146 '74.0.3729.115',
147 '74.0.3729.114',
148 '76.0.3774.1',
149 '76.0.3774.0',
150 '75.0.3770.7',
151 '74.0.3729.113',
152 '74.0.3729.112',
153 '74.0.3729.111',
154 '76.0.3773.1',
155 '76.0.3773.0',
156 '75.0.3770.6',
157 '74.0.3729.110',
158 '74.0.3729.109',
159 '76.0.3772.1',
160 '76.0.3772.0',
161 '75.0.3770.5',
162 '74.0.3729.108',
163 '74.0.3729.107',
164 '76.0.3771.1',
165 '76.0.3771.0',
166 '75.0.3770.4',
167 '74.0.3729.106',
168 '74.0.3729.105',
169 '75.0.3770.3',
170 '74.0.3729.104',
171 '74.0.3729.103',
172 '74.0.3729.102',
173 '75.0.3770.2',
174 '74.0.3729.101',
175 '75.0.3770.1',
176 '75.0.3770.0',
177 '74.0.3729.100',
178 '75.0.3769.5',
179 '75.0.3769.4',
180 '74.0.3729.99',
181 '75.0.3769.3',
182 '75.0.3769.2',
183 '75.0.3768.6',
184 '74.0.3729.98',
185 '75.0.3769.1',
186 '75.0.3769.0',
187 '74.0.3729.97',
188 '73.0.3683.119',
189 '73.0.3683.118',
190 '74.0.3729.96',
191 '75.0.3768.5',
192 '75.0.3768.4',
193 '75.0.3768.3',
194 '75.0.3768.2',
195 '74.0.3729.95',
196 '74.0.3729.94',
197 '75.0.3768.1',
198 '75.0.3768.0',
199 '74.0.3729.93',
200 '74.0.3729.92',
201 '73.0.3683.117',
202 '74.0.3729.91',
203 '75.0.3766.3',
204 '74.0.3729.90',
205 '75.0.3767.2',
206 '75.0.3767.1',
207 '75.0.3767.0',
208 '74.0.3729.89',
209 '73.0.3683.116',
210 '75.0.3766.2',
211 '74.0.3729.88',
212 '75.0.3766.1',
213 '75.0.3766.0',
214 '74.0.3729.87',
215 '73.0.3683.115',
216 '74.0.3729.86',
217 '75.0.3765.1',
218 '75.0.3765.0',
219 '74.0.3729.85',
220 '73.0.3683.114',
221 '74.0.3729.84',
222 '75.0.3764.1',
223 '75.0.3764.0',
224 '74.0.3729.83',
225 '73.0.3683.113',
226 '75.0.3763.2',
227 '75.0.3761.4',
228 '74.0.3729.82',
229 '75.0.3763.1',
230 '75.0.3763.0',
231 '74.0.3729.81',
232 '73.0.3683.112',
233 '75.0.3762.1',
234 '75.0.3762.0',
235 '74.0.3729.80',
236 '75.0.3761.3',
237 '74.0.3729.79',
238 '73.0.3683.111',
239 '75.0.3761.2',
240 '74.0.3729.78',
241 '74.0.3729.77',
242 '75.0.3761.1',
243 '75.0.3761.0',
244 '73.0.3683.110',
245 '74.0.3729.76',
246 '74.0.3729.75',
247 '75.0.3760.0',
248 '74.0.3729.74',
249 '75.0.3759.8',
250 '75.0.3759.7',
251 '75.0.3759.6',
252 '74.0.3729.73',
253 '75.0.3759.5',
254 '74.0.3729.72',
255 '73.0.3683.109',
256 '75.0.3759.4',
257 '75.0.3759.3',
258 '74.0.3729.71',
259 '75.0.3759.2',
260 '74.0.3729.70',
261 '73.0.3683.108',
262 '74.0.3729.69',
263 '75.0.3759.1',
264 '75.0.3759.0',
265 '74.0.3729.68',
266 '73.0.3683.107',
267 '74.0.3729.67',
268 '75.0.3758.1',
269 '75.0.3758.0',
270 '74.0.3729.66',
271 '73.0.3683.106',
272 '74.0.3729.65',
273 '75.0.3757.1',
274 '75.0.3757.0',
275 '74.0.3729.64',
276 '73.0.3683.105',
277 '74.0.3729.63',
278 '75.0.3756.1',
279 '75.0.3756.0',
280 '74.0.3729.62',
281 '73.0.3683.104',
282 '75.0.3755.3',
283 '75.0.3755.2',
284 '73.0.3683.103',
285 '75.0.3755.1',
286 '75.0.3755.0',
287 '74.0.3729.61',
288 '73.0.3683.102',
289 '74.0.3729.60',
290 '75.0.3754.2',
291 '74.0.3729.59',
292 '75.0.3753.4',
293 '74.0.3729.58',
294 '75.0.3754.1',
295 '75.0.3754.0',
296 '74.0.3729.57',
297 '73.0.3683.101',
298 '75.0.3753.3',
299 '75.0.3752.2',
300 '75.0.3753.2',
301 '74.0.3729.56',
302 '75.0.3753.1',
303 '75.0.3753.0',
304 '74.0.3729.55',
305 '73.0.3683.100',
306 '74.0.3729.54',
307 '75.0.3752.1',
308 '75.0.3752.0',
309 '74.0.3729.53',
310 '73.0.3683.99',
311 '74.0.3729.52',
312 '75.0.3751.1',
313 '75.0.3751.0',
314 '74.0.3729.51',
315 '73.0.3683.98',
316 '74.0.3729.50',
317 '75.0.3750.0',
318 '74.0.3729.49',
319 '74.0.3729.48',
320 '74.0.3729.47',
321 '75.0.3749.3',
322 '74.0.3729.46',
323 '73.0.3683.97',
324 '75.0.3749.2',
325 '74.0.3729.45',
326 '75.0.3749.1',
327 '75.0.3749.0',
328 '74.0.3729.44',
329 '73.0.3683.96',
330 '74.0.3729.43',
331 '74.0.3729.42',
332 '75.0.3748.1',
333 '75.0.3748.0',
334 '74.0.3729.41',
335 '75.0.3747.1',
336 '73.0.3683.95',
337 '75.0.3746.4',
338 '74.0.3729.40',
339 '74.0.3729.39',
340 '75.0.3747.0',
341 '75.0.3746.3',
342 '75.0.3746.2',
343 '74.0.3729.38',
344 '75.0.3746.1',
345 '75.0.3746.0',
346 '74.0.3729.37',
347 '73.0.3683.94',
348 '75.0.3745.5',
349 '75.0.3745.4',
350 '75.0.3745.3',
351 '75.0.3745.2',
352 '74.0.3729.36',
353 '75.0.3745.1',
354 '75.0.3745.0',
355 '75.0.3744.2',
356 '74.0.3729.35',
357 '73.0.3683.93',
358 '74.0.3729.34',
359 '75.0.3744.1',
360 '75.0.3744.0',
361 '74.0.3729.33',
362 '73.0.3683.92',
363 '74.0.3729.32',
364 '74.0.3729.31',
365 '73.0.3683.91',
366 '75.0.3741.2',
367 '75.0.3740.5',
368 '74.0.3729.30',
369 '75.0.3741.1',
370 '75.0.3741.0',
371 '74.0.3729.29',
372 '75.0.3740.4',
373 '73.0.3683.90',
374 '74.0.3729.28',
375 '75.0.3740.3',
376 '73.0.3683.89',
377 '75.0.3740.2',
378 '74.0.3729.27',
379 '75.0.3740.1',
380 '75.0.3740.0',
381 '74.0.3729.26',
382 '73.0.3683.88',
383 '73.0.3683.87',
384 '74.0.3729.25',
385 '75.0.3739.1',
386 '75.0.3739.0',
387 '73.0.3683.86',
388 '74.0.3729.24',
389 '73.0.3683.85',
390 '75.0.3738.4',
391 '75.0.3738.3',
392 '75.0.3738.2',
393 '75.0.3738.1',
394 '75.0.3738.0',
395 '74.0.3729.23',
396 '73.0.3683.84',
397 '74.0.3729.22',
398 '74.0.3729.21',
399 '75.0.3737.1',
400 '75.0.3737.0',
401 '74.0.3729.20',
402 '73.0.3683.83',
403 '74.0.3729.19',
404 '75.0.3736.1',
405 '75.0.3736.0',
406 '74.0.3729.18',
407 '73.0.3683.82',
408 '74.0.3729.17',
409 '75.0.3735.1',
410 '75.0.3735.0',
411 '74.0.3729.16',
412 '73.0.3683.81',
413 '75.0.3734.1',
414 '75.0.3734.0',
415 '74.0.3729.15',
416 '73.0.3683.80',
417 '74.0.3729.14',
418 '75.0.3733.1',
419 '75.0.3733.0',
420 '75.0.3732.1',
421 '74.0.3729.13',
422 '74.0.3729.12',
423 '73.0.3683.79',
424 '74.0.3729.11',
425 '75.0.3732.0',
426 '74.0.3729.10',
427 '73.0.3683.78',
428 '74.0.3729.9',
429 '74.0.3729.8',
430 '74.0.3729.7',
431 '75.0.3731.3',
432 '75.0.3731.2',
433 '75.0.3731.0',
434 '74.0.3729.6',
435 '73.0.3683.77',
436 '73.0.3683.76',
437 '75.0.3730.5',
438 '75.0.3730.4',
439 '73.0.3683.75',
440 '74.0.3729.5',
441 '73.0.3683.74',
442 '75.0.3730.3',
443 '75.0.3730.2',
444 '74.0.3729.4',
445 '73.0.3683.73',
446 '73.0.3683.72',
447 '75.0.3730.1',
448 '75.0.3730.0',
449 '74.0.3729.3',
450 '73.0.3683.71',
451 '74.0.3729.2',
452 '73.0.3683.70',
453 '74.0.3729.1',
454 '74.0.3729.0',
455 '74.0.3726.4',
456 '73.0.3683.69',
457 '74.0.3726.3',
458 '74.0.3728.0',
459 '74.0.3726.2',
460 '73.0.3683.68',
461 '74.0.3726.1',
462 '74.0.3726.0',
463 '74.0.3725.4',
464 '73.0.3683.67',
465 '73.0.3683.66',
466 '74.0.3725.3',
467 '74.0.3725.2',
468 '74.0.3725.1',
469 '74.0.3724.8',
470 '74.0.3725.0',
471 '73.0.3683.65',
472 '74.0.3724.7',
473 '74.0.3724.6',
474 '74.0.3724.5',
475 '74.0.3724.4',
476 '74.0.3724.3',
477 '74.0.3724.2',
478 '74.0.3724.1',
479 '74.0.3724.0',
480 '73.0.3683.64',
481 '74.0.3723.1',
482 '74.0.3723.0',
483 '73.0.3683.63',
484 '74.0.3722.1',
485 '74.0.3722.0',
486 '73.0.3683.62',
487 '74.0.3718.9',
488 '74.0.3702.3',
489 '74.0.3721.3',
490 '74.0.3721.2',
491 '74.0.3721.1',
492 '74.0.3721.0',
493 '74.0.3720.6',
494 '73.0.3683.61',
495 '72.0.3626.122',
496 '73.0.3683.60',
497 '74.0.3720.5',
498 '72.0.3626.121',
499 '74.0.3718.8',
500 '74.0.3720.4',
501 '74.0.3720.3',
502 '74.0.3718.7',
503 '74.0.3720.2',
504 '74.0.3720.1',
505 '74.0.3720.0',
506 '74.0.3718.6',
507 '74.0.3719.5',
508 '73.0.3683.59',
509 '74.0.3718.5',
510 '74.0.3718.4',
511 '74.0.3719.4',
512 '74.0.3719.3',
513 '74.0.3719.2',
514 '74.0.3719.1',
515 '73.0.3683.58',
516 '74.0.3719.0',
517 '73.0.3683.57',
518 '73.0.3683.56',
519 '74.0.3718.3',
520 '73.0.3683.55',
521 '74.0.3718.2',
522 '74.0.3718.1',
523 '74.0.3718.0',
524 '73.0.3683.54',
525 '74.0.3717.2',
526 '73.0.3683.53',
527 '74.0.3717.1',
528 '74.0.3717.0',
529 '73.0.3683.52',
530 '74.0.3716.1',
531 '74.0.3716.0',
532 '73.0.3683.51',
533 '74.0.3715.1',
534 '74.0.3715.0',
535 '73.0.3683.50',
536 '74.0.3711.2',
537 '74.0.3714.2',
538 '74.0.3713.3',
539 '74.0.3714.1',
540 '74.0.3714.0',
541 '73.0.3683.49',
542 '74.0.3713.1',
543 '74.0.3713.0',
544 '72.0.3626.120',
545 '73.0.3683.48',
546 '74.0.3712.2',
547 '74.0.3712.1',
548 '74.0.3712.0',
549 '73.0.3683.47',
550 '72.0.3626.119',
551 '73.0.3683.46',
552 '74.0.3710.2',
553 '72.0.3626.118',
554 '74.0.3711.1',
555 '74.0.3711.0',
556 '73.0.3683.45',
557 '72.0.3626.117',
558 '74.0.3710.1',
559 '74.0.3710.0',
560 '73.0.3683.44',
561 '72.0.3626.116',
562 '74.0.3709.1',
563 '74.0.3709.0',
564 '74.0.3704.9',
565 '73.0.3683.43',
566 '72.0.3626.115',
567 '74.0.3704.8',
568 '74.0.3704.7',
569 '74.0.3708.0',
570 '74.0.3706.7',
571 '74.0.3704.6',
572 '73.0.3683.42',
573 '72.0.3626.114',
574 '74.0.3706.6',
575 '72.0.3626.113',
576 '74.0.3704.5',
577 '74.0.3706.5',
578 '74.0.3706.4',
579 '74.0.3706.3',
580 '74.0.3706.2',
581 '74.0.3706.1',
582 '74.0.3706.0',
583 '73.0.3683.41',
584 '72.0.3626.112',
585 '74.0.3705.1',
586 '74.0.3705.0',
587 '73.0.3683.40',
588 '72.0.3626.111',
589 '73.0.3683.39',
590 '74.0.3704.4',
591 '73.0.3683.38',
592 '74.0.3704.3',
593 '74.0.3704.2',
594 '74.0.3704.1',
595 '74.0.3704.0',
596 '73.0.3683.37',
597 '72.0.3626.110',
598 '72.0.3626.109',
599 '74.0.3703.3',
600 '74.0.3703.2',
601 '73.0.3683.36',
602 '74.0.3703.1',
603 '74.0.3703.0',
604 '73.0.3683.35',
605 '72.0.3626.108',
606 '74.0.3702.2',
607 '74.0.3699.3',
608 '74.0.3702.1',
609 '74.0.3702.0',
610 '73.0.3683.34',
611 '72.0.3626.107',
612 '73.0.3683.33',
613 '74.0.3701.1',
614 '74.0.3701.0',
615 '73.0.3683.32',
616 '73.0.3683.31',
617 '72.0.3626.105',
618 '74.0.3700.1',
619 '74.0.3700.0',
620 '73.0.3683.29',
621 '72.0.3626.103',
622 '74.0.3699.2',
623 '74.0.3699.1',
624 '74.0.3699.0',
625 '73.0.3683.28',
626 '72.0.3626.102',
627 '73.0.3683.27',
628 '73.0.3683.26',
629 '74.0.3698.0',
630 '74.0.3696.2',
631 '72.0.3626.101',
632 '73.0.3683.25',
633 '74.0.3696.1',
634 '74.0.3696.0',
635 '74.0.3694.8',
636 '72.0.3626.100',
637 '74.0.3694.7',
638 '74.0.3694.6',
639 '74.0.3694.5',
640 '74.0.3694.4',
641 '72.0.3626.99',
642 '72.0.3626.98',
643 '74.0.3694.3',
644 '73.0.3683.24',
645 '72.0.3626.97',
646 '72.0.3626.96',
647 '72.0.3626.95',
648 '73.0.3683.23',
649 '72.0.3626.94',
650 '73.0.3683.22',
651 '73.0.3683.21',
652 '72.0.3626.93',
653 '74.0.3694.2',
654 '72.0.3626.92',
655 '74.0.3694.1',
656 '74.0.3694.0',
657 '74.0.3693.6',
658 '73.0.3683.20',
659 '72.0.3626.91',
660 '74.0.3693.5',
661 '74.0.3693.4',
662 '74.0.3693.3',
663 '74.0.3693.2',
664 '73.0.3683.19',
665 '74.0.3693.1',
666 '74.0.3693.0',
667 '73.0.3683.18',
668 '72.0.3626.90',
669 '74.0.3692.1',
670 '74.0.3692.0',
671 '73.0.3683.17',
672 '72.0.3626.89',
673 '74.0.3687.3',
674 '74.0.3691.1',
675 '74.0.3691.0',
676 '73.0.3683.16',
677 '72.0.3626.88',
678 '72.0.3626.87',
679 '73.0.3683.15',
680 '74.0.3690.1',
681 '74.0.3690.0',
682 '73.0.3683.14',
683 '72.0.3626.86',
684 '73.0.3683.13',
685 '73.0.3683.12',
686 '74.0.3689.1',
687 '74.0.3689.0',
688 '73.0.3683.11',
689 '72.0.3626.85',
690 '73.0.3683.10',
691 '72.0.3626.84',
692 '73.0.3683.9',
693 '74.0.3688.1',
694 '74.0.3688.0',
695 '73.0.3683.8',
696 '72.0.3626.83',
697 '74.0.3687.2',
698 '74.0.3687.1',
699 '74.0.3687.0',
700 '73.0.3683.7',
701 '72.0.3626.82',
702 '74.0.3686.4',
703 '72.0.3626.81',
704 '74.0.3686.3',
705 '74.0.3686.2',
706 '74.0.3686.1',
707 '74.0.3686.0',
708 '73.0.3683.6',
709 '72.0.3626.80',
710 '74.0.3685.1',
711 '74.0.3685.0',
712 '73.0.3683.5',
713 '72.0.3626.79',
714 '74.0.3684.1',
715 '74.0.3684.0',
716 '73.0.3683.4',
717 '72.0.3626.78',
718 '72.0.3626.77',
719 '73.0.3683.3',
720 '73.0.3683.2',
721 '72.0.3626.76',
722 '73.0.3683.1',
723 '73.0.3683.0',
724 '72.0.3626.75',
725 '71.0.3578.141',
726 '73.0.3682.1',
727 '73.0.3682.0',
728 '72.0.3626.74',
729 '71.0.3578.140',
730 '73.0.3681.4',
731 '73.0.3681.3',
732 '73.0.3681.2',
733 '73.0.3681.1',
734 '73.0.3681.0',
735 '72.0.3626.73',
736 '71.0.3578.139',
737 '72.0.3626.72',
738 '72.0.3626.71',
739 '73.0.3680.1',
740 '73.0.3680.0',
741 '72.0.3626.70',
742 '71.0.3578.138',
743 '73.0.3678.2',
744 '73.0.3679.1',
745 '73.0.3679.0',
746 '72.0.3626.69',
747 '71.0.3578.137',
748 '73.0.3678.1',
749 '73.0.3678.0',
750 '71.0.3578.136',
751 '73.0.3677.1',
752 '73.0.3677.0',
753 '72.0.3626.68',
754 '72.0.3626.67',
755 '71.0.3578.135',
756 '73.0.3676.1',
757 '73.0.3676.0',
758 '73.0.3674.2',
759 '72.0.3626.66',
760 '71.0.3578.134',
761 '73.0.3674.1',
762 '73.0.3674.0',
763 '72.0.3626.65',
764 '71.0.3578.133',
765 '73.0.3673.2',
766 '73.0.3673.1',
767 '73.0.3673.0',
768 '72.0.3626.64',
769 '71.0.3578.132',
770 '72.0.3626.63',
771 '72.0.3626.62',
772 '72.0.3626.61',
773 '72.0.3626.60',
774 '73.0.3672.1',
775 '73.0.3672.0',
776 '72.0.3626.59',
777 '71.0.3578.131',
778 '73.0.3671.3',
779 '73.0.3671.2',
780 '73.0.3671.1',
781 '73.0.3671.0',
782 '72.0.3626.58',
783 '71.0.3578.130',
784 '73.0.3670.1',
785 '73.0.3670.0',
786 '72.0.3626.57',
787 '71.0.3578.129',
788 '73.0.3669.1',
789 '73.0.3669.0',
790 '72.0.3626.56',
791 '71.0.3578.128',
792 '73.0.3668.2',
793 '73.0.3668.1',
794 '73.0.3668.0',
795 '72.0.3626.55',
796 '71.0.3578.127',
797 '73.0.3667.2',
798 '73.0.3667.1',
799 '73.0.3667.0',
800 '72.0.3626.54',
801 '71.0.3578.126',
802 '73.0.3666.1',
803 '73.0.3666.0',
804 '72.0.3626.53',
805 '71.0.3578.125',
806 '73.0.3665.4',
807 '73.0.3665.3',
808 '72.0.3626.52',
809 '73.0.3665.2',
810 '73.0.3664.4',
811 '73.0.3665.1',
812 '73.0.3665.0',
813 '72.0.3626.51',
814 '71.0.3578.124',
815 '72.0.3626.50',
816 '73.0.3664.3',
817 '73.0.3664.2',
818 '73.0.3664.1',
819 '73.0.3664.0',
820 '73.0.3663.2',
821 '72.0.3626.49',
822 '71.0.3578.123',
823 '73.0.3663.1',
824 '73.0.3663.0',
825 '72.0.3626.48',
826 '71.0.3578.122',
827 '73.0.3662.1',
828 '73.0.3662.0',
829 '72.0.3626.47',
830 '71.0.3578.121',
831 '73.0.3661.1',
832 '72.0.3626.46',
833 '73.0.3661.0',
834 '72.0.3626.45',
835 '71.0.3578.120',
836 '73.0.3660.2',
837 '73.0.3660.1',
838 '73.0.3660.0',
839 '72.0.3626.44',
840 '71.0.3578.119',
841 '73.0.3659.1',
842 '73.0.3659.0',
843 '72.0.3626.43',
844 '71.0.3578.118',
845 '73.0.3658.1',
846 '73.0.3658.0',
847 '72.0.3626.42',
848 '71.0.3578.117',
849 '73.0.3657.1',
850 '73.0.3657.0',
851 '72.0.3626.41',
852 '71.0.3578.116',
853 '73.0.3656.1',
854 '73.0.3656.0',
855 '72.0.3626.40',
856 '71.0.3578.115',
857 '73.0.3655.1',
858 '73.0.3655.0',
859 '72.0.3626.39',
860 '71.0.3578.114',
861 '73.0.3654.1',
862 '73.0.3654.0',
863 '72.0.3626.38',
864 '71.0.3578.113',
865 '73.0.3653.1',
866 '73.0.3653.0',
867 '72.0.3626.37',
868 '71.0.3578.112',
869 '73.0.3652.1',
870 '73.0.3652.0',
871 '72.0.3626.36',
872 '71.0.3578.111',
873 '73.0.3651.1',
874 '73.0.3651.0',
875 '72.0.3626.35',
876 '71.0.3578.110',
877 '73.0.3650.1',
878 '73.0.3650.0',
879 '72.0.3626.34',
880 '71.0.3578.109',
881 '73.0.3649.1',
882 '73.0.3649.0',
883 '72.0.3626.33',
884 '71.0.3578.108',
885 '73.0.3648.2',
886 '73.0.3648.1',
887 '73.0.3648.0',
888 '72.0.3626.32',
889 '71.0.3578.107',
890 '73.0.3647.2',
891 '73.0.3647.1',
892 '73.0.3647.0',
893 '72.0.3626.31',
894 '71.0.3578.106',
895 '73.0.3635.3',
896 '73.0.3646.2',
897 '73.0.3646.1',
898 '73.0.3646.0',
899 '72.0.3626.30',
900 '71.0.3578.105',
901 '72.0.3626.29',
902 '73.0.3645.2',
903 '73.0.3645.1',
904 '73.0.3645.0',
905 '72.0.3626.28',
906 '71.0.3578.104',
907 '72.0.3626.27',
908 '72.0.3626.26',
909 '72.0.3626.25',
910 '72.0.3626.24',
911 '73.0.3644.0',
912 '73.0.3643.2',
913 '72.0.3626.23',
914 '71.0.3578.103',
915 '73.0.3643.1',
916 '73.0.3643.0',
917 '72.0.3626.22',
918 '71.0.3578.102',
919 '73.0.3642.1',
920 '73.0.3642.0',
921 '72.0.3626.21',
922 '71.0.3578.101',
923 '73.0.3641.1',
924 '73.0.3641.0',
925 '72.0.3626.20',
926 '71.0.3578.100',
927 '72.0.3626.19',
928 '73.0.3640.1',
929 '73.0.3640.0',
930 '72.0.3626.18',
931 '73.0.3639.1',
932 '71.0.3578.99',
933 '73.0.3639.0',
934 '72.0.3626.17',
935 '73.0.3638.2',
936 '72.0.3626.16',
937 '73.0.3638.1',
938 '73.0.3638.0',
939 '72.0.3626.15',
940 '71.0.3578.98',
941 '73.0.3635.2',
942 '71.0.3578.97',
943 '73.0.3637.1',
944 '73.0.3637.0',
945 '72.0.3626.14',
946 '71.0.3578.96',
947 '71.0.3578.95',
948 '72.0.3626.13',
949 '71.0.3578.94',
950 '73.0.3636.2',
951 '71.0.3578.93',
952 '73.0.3636.1',
953 '73.0.3636.0',
954 '72.0.3626.12',
955 '71.0.3578.92',
956 '73.0.3635.1',
957 '73.0.3635.0',
958 '72.0.3626.11',
959 '71.0.3578.91',
960 '73.0.3634.2',
961 '73.0.3634.1',
962 '73.0.3634.0',
963 '72.0.3626.10',
964 '71.0.3578.90',
965 '71.0.3578.89',
966 '73.0.3633.2',
967 '73.0.3633.1',
968 '73.0.3633.0',
969 '72.0.3610.4',
970 '72.0.3626.9',
971 '71.0.3578.88',
972 '73.0.3632.5',
973 '73.0.3632.4',
974 '73.0.3632.3',
975 '73.0.3632.2',
976 '73.0.3632.1',
977 '73.0.3632.0',
978 '72.0.3626.8',
979 '71.0.3578.87',
980 '73.0.3631.2',
981 '73.0.3631.1',
982 '73.0.3631.0',
983 '72.0.3626.7',
984 '71.0.3578.86',
985 '72.0.3626.6',
986 '73.0.3630.1',
987 '73.0.3630.0',
988 '72.0.3626.5',
989 '71.0.3578.85',
990 '72.0.3626.4',
991 '73.0.3628.3',
992 '73.0.3628.2',
993 '73.0.3629.1',
994 '73.0.3629.0',
995 '72.0.3626.3',
996 '71.0.3578.84',
997 '73.0.3628.1',
998 '73.0.3628.0',
999 '71.0.3578.83',
1000 '73.0.3627.1',
1001 '73.0.3627.0',
1002 '72.0.3626.2',
1003 '71.0.3578.82',
1004 '71.0.3578.81',
1005 '71.0.3578.80',
1006 '72.0.3626.1',
1007 '72.0.3626.0',
1008 '71.0.3578.79',
1009 '70.0.3538.124',
1010 '71.0.3578.78',
1011 '72.0.3623.4',
1012 '72.0.3625.2',
1013 '72.0.3625.1',
1014 '72.0.3625.0',
1015 '71.0.3578.77',
1016 '70.0.3538.123',
1017 '72.0.3624.4',
1018 '72.0.3624.3',
1019 '72.0.3624.2',
1020 '71.0.3578.76',
1021 '72.0.3624.1',
1022 '72.0.3624.0',
1023 '72.0.3623.3',
1024 '71.0.3578.75',
1025 '70.0.3538.122',
1026 '71.0.3578.74',
1027 '72.0.3623.2',
1028 '72.0.3610.3',
1029 '72.0.3623.1',
1030 '72.0.3623.0',
1031 '72.0.3622.3',
1032 '72.0.3622.2',
1033 '71.0.3578.73',
1034 '70.0.3538.121',
1035 '72.0.3622.1',
1036 '72.0.3622.0',
1037 '71.0.3578.72',
1038 '70.0.3538.120',
1039 '72.0.3621.1',
1040 '72.0.3621.0',
1041 '71.0.3578.71',
1042 '70.0.3538.119',
1043 '72.0.3620.1',
1044 '72.0.3620.0',
1045 '71.0.3578.70',
1046 '70.0.3538.118',
1047 '71.0.3578.69',
1048 '72.0.3619.1',
1049 '72.0.3619.0',
1050 '71.0.3578.68',
1051 '70.0.3538.117',
1052 '71.0.3578.67',
1053 '72.0.3618.1',
1054 '72.0.3618.0',
1055 '71.0.3578.66',
1056 '70.0.3538.116',
1057 '72.0.3617.1',
1058 '72.0.3617.0',
1059 '71.0.3578.65',
1060 '70.0.3538.115',
1061 '72.0.3602.3',
1062 '71.0.3578.64',
1063 '72.0.3616.1',
1064 '72.0.3616.0',
1065 '71.0.3578.63',
1066 '70.0.3538.114',
1067 '71.0.3578.62',
1068 '72.0.3615.1',
1069 '72.0.3615.0',
1070 '71.0.3578.61',
1071 '70.0.3538.113',
1072 '72.0.3614.1',
1073 '72.0.3614.0',
1074 '71.0.3578.60',
1075 '70.0.3538.112',
1076 '72.0.3613.1',
1077 '72.0.3613.0',
1078 '71.0.3578.59',
1079 '70.0.3538.111',
1080 '72.0.3612.2',
1081 '72.0.3612.1',
1082 '72.0.3612.0',
1083 '70.0.3538.110',
1084 '71.0.3578.58',
1085 '70.0.3538.109',
1086 '72.0.3611.2',
1087 '72.0.3611.1',
1088 '72.0.3611.0',
1089 '71.0.3578.57',
1090 '70.0.3538.108',
1091 '72.0.3610.2',
1092 '71.0.3578.56',
1093 '71.0.3578.55',
1094 '72.0.3610.1',
1095 '72.0.3610.0',
1096 '71.0.3578.54',
1097 '70.0.3538.107',
1098 '71.0.3578.53',
1099 '72.0.3609.3',
1100 '71.0.3578.52',
1101 '72.0.3609.2',
1102 '71.0.3578.51',
1103 '72.0.3608.5',
1104 '72.0.3609.1',
1105 '72.0.3609.0',
1106 '71.0.3578.50',
1107 '70.0.3538.106',
1108 '72.0.3608.4',
1109 '72.0.3608.3',
1110 '72.0.3608.2',
1111 '71.0.3578.49',
1112 '72.0.3608.1',
1113 '72.0.3608.0',
1114 '70.0.3538.105',
1115 '71.0.3578.48',
1116 '72.0.3607.1',
1117 '72.0.3607.0',
1118 '71.0.3578.47',
1119 '70.0.3538.104',
1120 '72.0.3606.2',
1121 '72.0.3606.1',
1122 '72.0.3606.0',
1123 '71.0.3578.46',
1124 '70.0.3538.103',
1125 '70.0.3538.102',
1126 '72.0.3605.3',
1127 '72.0.3605.2',
1128 '72.0.3605.1',
1129 '72.0.3605.0',
1130 '71.0.3578.45',
1131 '70.0.3538.101',
1132 '71.0.3578.44',
1133 '71.0.3578.43',
1134 '70.0.3538.100',
1135 '70.0.3538.99',
1136 '71.0.3578.42',
1137 '72.0.3604.1',
1138 '72.0.3604.0',
1139 '71.0.3578.41',
1140 '70.0.3538.98',
1141 '71.0.3578.40',
1142 '72.0.3603.2',
1143 '72.0.3603.1',
1144 '72.0.3603.0',
1145 '71.0.3578.39',
1146 '70.0.3538.97',
1147 '72.0.3602.2',
1148 '71.0.3578.38',
1149 '71.0.3578.37',
1150 '72.0.3602.1',
1151 '72.0.3602.0',
1152 '71.0.3578.36',
1153 '70.0.3538.96',
1154 '72.0.3601.1',
1155 '72.0.3601.0',
1156 '71.0.3578.35',
1157 '70.0.3538.95',
1158 '72.0.3600.1',
1159 '72.0.3600.0',
1160 '71.0.3578.34',
1161 '70.0.3538.94',
1162 '72.0.3599.3',
1163 '72.0.3599.2',
1164 '72.0.3599.1',
1165 '72.0.3599.0',
1166 '71.0.3578.33',
1167 '70.0.3538.93',
1168 '72.0.3598.1',
1169 '72.0.3598.0',
1170 '71.0.3578.32',
1171 '70.0.3538.87',
1172 '72.0.3597.1',
1173 '72.0.3597.0',
1174 '72.0.3596.2',
1175 '71.0.3578.31',
1176 '70.0.3538.86',
1177 '71.0.3578.30',
1178 '71.0.3578.29',
1179 '72.0.3596.1',
1180 '72.0.3596.0',
1181 '71.0.3578.28',
1182 '70.0.3538.85',
1183 '72.0.3595.2',
1184 '72.0.3591.3',
1185 '72.0.3595.1',
1186 '72.0.3595.0',
1187 '71.0.3578.27',
1188 '70.0.3538.84',
1189 '72.0.3594.1',
1190 '72.0.3594.0',
1191 '71.0.3578.26',
1192 '70.0.3538.83',
1193 '72.0.3593.2',
1194 '72.0.3593.1',
1195 '72.0.3593.0',
1196 '71.0.3578.25',
1197 '70.0.3538.82',
1198 '72.0.3589.3',
1199 '72.0.3592.2',
1200 '72.0.3592.1',
1201 '72.0.3592.0',
1202 '71.0.3578.24',
1203 '72.0.3589.2',
1204 '70.0.3538.81',
1205 '70.0.3538.80',
1206 '72.0.3591.2',
1207 '72.0.3591.1',
1208 '72.0.3591.0',
1209 '71.0.3578.23',
1210 '70.0.3538.79',
1211 '71.0.3578.22',
1212 '72.0.3590.1',
1213 '72.0.3590.0',
1214 '71.0.3578.21',
1215 '70.0.3538.78',
1216 '70.0.3538.77',
1217 '72.0.3589.1',
1218 '72.0.3589.0',
1219 '71.0.3578.20',
1220 '70.0.3538.76',
1221 '71.0.3578.19',
1222 '70.0.3538.75',
1223 '72.0.3588.1',
1224 '72.0.3588.0',
1225 '71.0.3578.18',
1226 '70.0.3538.74',
1227 '72.0.3586.2',
1228 '72.0.3587.0',
1229 '71.0.3578.17',
1230 '70.0.3538.73',
1231 '72.0.3586.1',
1232 '72.0.3586.0',
1233 '71.0.3578.16',
1234 '70.0.3538.72',
1235 '72.0.3585.1',
1236 '72.0.3585.0',
1237 '71.0.3578.15',
1238 '70.0.3538.71',
1239 '71.0.3578.14',
1240 '72.0.3584.1',
1241 '72.0.3584.0',
1242 '71.0.3578.13',
1243 '70.0.3538.70',
1244 '72.0.3583.2',
1245 '71.0.3578.12',
1246 '72.0.3583.1',
1247 '72.0.3583.0',
1248 '71.0.3578.11',
1249 '70.0.3538.69',
1250 '71.0.3578.10',
1251 '72.0.3582.0',
1252 '72.0.3581.4',
1253 '71.0.3578.9',
1254 '70.0.3538.67',
1255 '72.0.3581.3',
1256 '72.0.3581.2',
1257 '72.0.3581.1',
1258 '72.0.3581.0',
1259 '71.0.3578.8',
1260 '70.0.3538.66',
1261 '72.0.3580.1',
1262 '72.0.3580.0',
1263 '71.0.3578.7',
1264 '70.0.3538.65',
1265 '71.0.3578.6',
1266 '72.0.3579.1',
1267 '72.0.3579.0',
1268 '71.0.3578.5',
1269 '70.0.3538.64',
1270 '71.0.3578.4',
1271 '71.0.3578.3',
1272 '71.0.3578.2',
1273 '71.0.3578.1',
1274 '71.0.3578.0',
1275 '70.0.3538.63',
1276 '69.0.3497.128',
1277 '70.0.3538.62',
1278 '70.0.3538.61',
1279 '70.0.3538.60',
1280 '70.0.3538.59',
1281 '71.0.3577.1',
1282 '71.0.3577.0',
1283 '70.0.3538.58',
1284 '69.0.3497.127',
1285 '71.0.3576.2',
1286 '71.0.3576.1',
1287 '71.0.3576.0',
1288 '70.0.3538.57',
1289 '70.0.3538.56',
1290 '71.0.3575.2',
1291 '70.0.3538.55',
1292 '69.0.3497.126',
1293 '70.0.3538.54',
1294 '71.0.3575.1',
1295 '71.0.3575.0',
1296 '71.0.3574.1',
1297 '71.0.3574.0',
1298 '70.0.3538.53',
1299 '69.0.3497.125',
1300 '70.0.3538.52',
1301 '71.0.3573.1',
1302 '71.0.3573.0',
1303 '70.0.3538.51',
1304 '69.0.3497.124',
1305 '71.0.3572.1',
1306 '71.0.3572.0',
1307 '70.0.3538.50',
1308 '69.0.3497.123',
1309 '71.0.3571.2',
1310 '70.0.3538.49',
1311 '69.0.3497.122',
1312 '71.0.3571.1',
1313 '71.0.3571.0',
1314 '70.0.3538.48',
1315 '69.0.3497.121',
1316 '71.0.3570.1',
1317 '71.0.3570.0',
1318 '70.0.3538.47',
1319 '69.0.3497.120',
1320 '71.0.3568.2',
1321 '71.0.3569.1',
1322 '71.0.3569.0',
1323 '70.0.3538.46',
1324 '69.0.3497.119',
1325 '70.0.3538.45',
1326 '71.0.3568.1',
1327 '71.0.3568.0',
1328 '70.0.3538.44',
1329 '69.0.3497.118',
1330 '70.0.3538.43',
1331 '70.0.3538.42',
1332 '71.0.3567.1',
1333 '71.0.3567.0',
1334 '70.0.3538.41',
1335 '69.0.3497.117',
1336 '71.0.3566.1',
1337 '71.0.3566.0',
1338 '70.0.3538.40',
1339 '69.0.3497.116',
1340 '71.0.3565.1',
1341 '71.0.3565.0',
1342 '70.0.3538.39',
1343 '69.0.3497.115',
1344 '71.0.3564.1',
1345 '71.0.3564.0',
1346 '70.0.3538.38',
1347 '69.0.3497.114',
1348 '71.0.3563.0',
1349 '71.0.3562.2',
1350 '70.0.3538.37',
1351 '69.0.3497.113',
1352 '70.0.3538.36',
1353 '70.0.3538.35',
1354 '71.0.3562.1',
1355 '71.0.3562.0',
1356 '70.0.3538.34',
1357 '69.0.3497.112',
1358 '70.0.3538.33',
1359 '71.0.3561.1',
1360 '71.0.3561.0',
1361 '70.0.3538.32',
1362 '69.0.3497.111',
1363 '71.0.3559.6',
1364 '71.0.3560.1',
1365 '71.0.3560.0',
1366 '71.0.3559.5',
1367 '71.0.3559.4',
1368 '70.0.3538.31',
1369 '69.0.3497.110',
1370 '71.0.3559.3',
1371 '70.0.3538.30',
1372 '69.0.3497.109',
1373 '71.0.3559.2',
1374 '71.0.3559.1',
1375 '71.0.3559.0',
1376 '70.0.3538.29',
1377 '69.0.3497.108',
1378 '71.0.3558.2',
1379 '71.0.3558.1',
1380 '71.0.3558.0',
1381 '70.0.3538.28',
1382 '69.0.3497.107',
1383 '71.0.3557.2',
1384 '71.0.3557.1',
1385 '71.0.3557.0',
1386 '70.0.3538.27',
1387 '69.0.3497.106',
1388 '71.0.3554.4',
1389 '70.0.3538.26',
1390 '71.0.3556.1',
1391 '71.0.3556.0',
1392 '70.0.3538.25',
1393 '71.0.3554.3',
1394 '69.0.3497.105',
1395 '71.0.3554.2',
1396 '70.0.3538.24',
1397 '69.0.3497.104',
1398 '71.0.3555.2',
1399 '70.0.3538.23',
1400 '71.0.3555.1',
1401 '71.0.3555.0',
1402 '70.0.3538.22',
1403 '69.0.3497.103',
1404 '71.0.3554.1',
1405 '71.0.3554.0',
1406 '70.0.3538.21',
1407 '69.0.3497.102',
1408 '71.0.3553.3',
1409 '70.0.3538.20',
1410 '69.0.3497.101',
1411 '71.0.3553.2',
1412 '69.0.3497.100',
1413 '71.0.3553.1',
1414 '71.0.3553.0',
1415 '70.0.3538.19',
1416 '69.0.3497.99',
1417 '69.0.3497.98',
1418 '69.0.3497.97',
1419 '71.0.3552.6',
1420 '71.0.3552.5',
1421 '71.0.3552.4',
1422 '71.0.3552.3',
1423 '71.0.3552.2',
1424 '71.0.3552.1',
1425 '71.0.3552.0',
1426 '70.0.3538.18',
1427 '69.0.3497.96',
1428 '71.0.3551.3',
1429 '71.0.3551.2',
1430 '71.0.3551.1',
1431 '71.0.3551.0',
1432 '70.0.3538.17',
1433 '69.0.3497.95',
1434 '71.0.3550.3',
1435 '71.0.3550.2',
1436 '71.0.3550.1',
1437 '71.0.3550.0',
1438 '70.0.3538.16',
1439 '69.0.3497.94',
1440 '71.0.3549.1',
1441 '71.0.3549.0',
1442 '70.0.3538.15',
1443 '69.0.3497.93',
1444 '69.0.3497.92',
1445 '71.0.3548.1',
1446 '71.0.3548.0',
1447 '70.0.3538.14',
1448 '69.0.3497.91',
1449 '71.0.3547.1',
1450 '71.0.3547.0',
1451 '70.0.3538.13',
1452 '69.0.3497.90',
1453 '71.0.3546.2',
1454 '69.0.3497.89',
1455 '71.0.3546.1',
1456 '71.0.3546.0',
1457 '70.0.3538.12',
1458 '69.0.3497.88',
1459 '71.0.3545.4',
1460 '71.0.3545.3',
1461 '71.0.3545.2',
1462 '71.0.3545.1',
1463 '71.0.3545.0',
1464 '70.0.3538.11',
1465 '69.0.3497.87',
1466 '71.0.3544.5',
1467 '71.0.3544.4',
1468 '71.0.3544.3',
1469 '71.0.3544.2',
1470 '71.0.3544.1',
1471 '71.0.3544.0',
1472 '69.0.3497.86',
1473 '70.0.3538.10',
1474 '69.0.3497.85',
1475 '70.0.3538.9',
1476 '69.0.3497.84',
1477 '71.0.3543.4',
1478 '70.0.3538.8',
1479 '71.0.3543.3',
1480 '71.0.3543.2',
1481 '71.0.3543.1',
1482 '71.0.3543.0',
1483 '70.0.3538.7',
1484 '69.0.3497.83',
1485 '71.0.3542.2',
1486 '71.0.3542.1',
1487 '71.0.3542.0',
1488 '70.0.3538.6',
1489 '69.0.3497.82',
1490 '69.0.3497.81',
1491 '71.0.3541.1',
1492 '71.0.3541.0',
1493 '70.0.3538.5',
1494 '69.0.3497.80',
1495 '71.0.3540.1',
1496 '71.0.3540.0',
1497 '70.0.3538.4',
1498 '69.0.3497.79',
1499 '70.0.3538.3',
1500 '71.0.3539.1',
1501 '71.0.3539.0',
1502 '69.0.3497.78',
1503 '68.0.3440.134',
1504 '69.0.3497.77',
1505 '70.0.3538.2',
1506 '70.0.3538.1',
1507 '70.0.3538.0',
1508 '69.0.3497.76',
1509 '68.0.3440.133',
1510 '69.0.3497.75',
1511 '70.0.3537.2',
1512 '70.0.3537.1',
1513 '70.0.3537.0',
1514 '69.0.3497.74',
1515 '68.0.3440.132',
1516 '70.0.3536.0',
1517 '70.0.3535.5',
1518 '70.0.3535.4',
1519 '70.0.3535.3',
1520 '69.0.3497.73',
1521 '68.0.3440.131',
1522 '70.0.3532.8',
1523 '70.0.3532.7',
1524 '69.0.3497.72',
1525 '69.0.3497.71',
1526 '70.0.3535.2',
1527 '70.0.3535.1',
1528 '70.0.3535.0',
1529 '69.0.3497.70',
1530 '68.0.3440.130',
1531 '69.0.3497.69',
1532 '68.0.3440.129',
1533 '70.0.3534.4',
1534 '70.0.3534.3',
1535 '70.0.3534.2',
1536 '70.0.3534.1',
1537 '70.0.3534.0',
1538 '69.0.3497.68',
1539 '68.0.3440.128',
1540 '70.0.3533.2',
1541 '70.0.3533.1',
1542 '70.0.3533.0',
1543 '69.0.3497.67',
1544 '68.0.3440.127',
1545 '70.0.3532.6',
1546 '70.0.3532.5',
1547 '70.0.3532.4',
1548 '69.0.3497.66',
1549 '68.0.3440.126',
1550 '70.0.3532.3',
1551 '70.0.3532.2',
1552 '70.0.3532.1',
1553 '69.0.3497.60',
1554 '69.0.3497.65',
1555 '69.0.3497.64',
1556 '70.0.3532.0',
1557 '70.0.3531.0',
1558 '70.0.3530.4',
1559 '70.0.3530.3',
1560 '70.0.3530.2',
1561 '69.0.3497.58',
1562 '68.0.3440.125',
1563 '69.0.3497.57',
1564 '69.0.3497.56',
1565 '69.0.3497.55',
1566 '69.0.3497.54',
1567 '70.0.3530.1',
1568 '70.0.3530.0',
1569 '69.0.3497.53',
1570 '68.0.3440.124',
1571 '69.0.3497.52',
1572 '70.0.3529.3',
1573 '70.0.3529.2',
1574 '70.0.3529.1',
1575 '70.0.3529.0',
1576 '69.0.3497.51',
1577 '70.0.3528.4',
1578 '68.0.3440.123',
1579 '70.0.3528.3',
1580 '70.0.3528.2',
1581 '70.0.3528.1',
1582 '70.0.3528.0',
1583 '69.0.3497.50',
1584 '68.0.3440.122',
1585 '70.0.3527.1',
1586 '70.0.3527.0',
1587 '69.0.3497.49',
1588 '68.0.3440.121',
1589 '70.0.3526.1',
1590 '70.0.3526.0',
1591 '68.0.3440.120',
1592 '69.0.3497.48',
1593 '69.0.3497.47',
1594 '68.0.3440.119',
1595 '68.0.3440.118',
1596 '70.0.3525.5',
1597 '70.0.3525.4',
1598 '70.0.3525.3',
1599 '68.0.3440.117',
1600 '69.0.3497.46',
1601 '70.0.3525.2',
1602 '70.0.3525.1',
1603 '70.0.3525.0',
1604 '69.0.3497.45',
1605 '68.0.3440.116',
1606 '70.0.3524.4',
1607 '70.0.3524.3',
1608 '69.0.3497.44',
1609 '70.0.3524.2',
1610 '70.0.3524.1',
1611 '70.0.3524.0',
1612 '70.0.3523.2',
1613 '69.0.3497.43',
1614 '68.0.3440.115',
1615 '70.0.3505.9',
1616 '69.0.3497.42',
1617 '70.0.3505.8',
1618 '70.0.3523.1',
1619 '70.0.3523.0',
1620 '69.0.3497.41',
1621 '68.0.3440.114',
1622 '70.0.3505.7',
1623 '69.0.3497.40',
1624 '70.0.3522.1',
1625 '70.0.3522.0',
1626 '70.0.3521.2',
1627 '69.0.3497.39',
1628 '68.0.3440.113',
1629 '70.0.3505.6',
1630 '70.0.3521.1',
1631 '70.0.3521.0',
1632 '69.0.3497.38',
1633 '68.0.3440.112',
1634 '70.0.3520.1',
1635 '70.0.3520.0',
1636 '69.0.3497.37',
1637 '68.0.3440.111',
1638 '70.0.3519.3',
1639 '70.0.3519.2',
1640 '70.0.3519.1',
1641 '70.0.3519.0',
1642 '69.0.3497.36',
1643 '68.0.3440.110',
1644 '70.0.3518.1',
1645 '70.0.3518.0',
1646 '69.0.3497.35',
1647 '69.0.3497.34',
1648 '68.0.3440.109',
1649 '70.0.3517.1',
1650 '70.0.3517.0',
1651 '69.0.3497.33',
1652 '68.0.3440.108',
1653 '69.0.3497.32',
1654 '70.0.3516.3',
1655 '70.0.3516.2',
1656 '70.0.3516.1',
1657 '70.0.3516.0',
1658 '69.0.3497.31',
1659 '68.0.3440.107',
1660 '70.0.3515.4',
1661 '68.0.3440.106',
1662 '70.0.3515.3',
1663 '70.0.3515.2',
1664 '70.0.3515.1',
1665 '70.0.3515.0',
1666 '69.0.3497.30',
1667 '68.0.3440.105',
1668 '68.0.3440.104',
1669 '70.0.3514.2',
1670 '70.0.3514.1',
1671 '70.0.3514.0',
1672 '69.0.3497.29',
1673 '68.0.3440.103',
1674 '70.0.3513.1',
1675 '70.0.3513.0',
1676 '69.0.3497.28',
    )
    return _USER_AGENT_TPL % random.choice(_CHROME_VERSIONS)


std_headers = {
    'User-Agent': random_user_agent(),
    'Accept': 'text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8',
    'Accept-Encoding': 'gzip, deflate',
    'Accept-Language': 'en-us,en;q=0.5',
}


USER_AGENTS = {
    'Safari': 'Mozilla/5.0 (X11; Linux x86_64; rv:10.0) AppleWebKit/533.20.25 (KHTML, like Gecko) Version/5.0.4 Safari/533.20.27',
}


NO_DEFAULT = object()

ENGLISH_MONTH_NAMES = [
    'January', 'February', 'March', 'April', 'May', 'June',
    'July', 'August', 'September', 'October', 'November', 'December']

MONTH_NAMES = {
    'en': ENGLISH_MONTH_NAMES,
    'fr': [
        'janvier', 'février', 'mars', 'avril', 'mai', 'juin',
        'juillet', 'août', 'septembre', 'octobre', 'novembre', 'décembre'],
}

KNOWN_EXTENSIONS = (
    'mp4', 'm4a', 'm4p', 'm4b', 'm4r', 'm4v', 'aac',
    'flv', 'f4v', 'f4a', 'f4b',
    'webm', 'ogg', 'ogv', 'oga', 'ogx', 'spx', 'opus',
    'mkv', 'mka', 'mk3d',
    'avi', 'divx',
    'mov',
    'asf', 'wmv', 'wma',
    '3gp', '3g2',
    'mp3',
    'flac',
    'ape',
    'wav',
    'f4f', 'f4m', 'm3u8', 'smil')

# needed for sanitizing filenames in restricted mode
ACCENT_CHARS = dict(zip('ÂÃÄÀÁÅÆÇÈÉÊËÌÍÎÏÐÑÒÓÔÕÖŐØŒÙÚÛÜŰÝÞßàáâãäåæçèéêëìíîïðñòóôõöőøœùúûüűýþÿ',
                        itertools.chain('AAAAAA', ['AE'], 'CEEEEIIIIDNOOOOOOO', ['OE'], 'UUUUUY', ['TH', 'ss'],
                                        'aaaaaa', ['ae'], 'ceeeeiiiionooooooo', ['oe'], 'uuuuuy', ['th'], 'y')))

DATE_FORMATS = (
    '%d %B %Y',
    '%d %b %Y',
    '%B %d %Y',
    '%B %dst %Y',
    '%B %dnd %Y',
    '%B %drd %Y',
    '%B %dth %Y',
    '%b %d %Y',
    '%b %dst %Y',
    '%b %dnd %Y',
    '%b %drd %Y',
    '%b %dth %Y',
    '%b %dst %Y %I:%M',
    '%b %dnd %Y %I:%M',
    '%b %drd %Y %I:%M',
    '%b %dth %Y %I:%M',
    '%Y %m %d',
    '%Y-%m-%d',
    '%Y.%m.%d.',
    '%Y/%m/%d',
    '%Y/%m/%d %H:%M',
    '%Y/%m/%d %H:%M:%S',
    '%Y%m%d%H%M',
    '%Y%m%d%H%M%S',
    '%Y%m%d',
    '%Y-%m-%d %H:%M',
    '%Y-%m-%d %H:%M:%S',
    '%Y-%m-%d %H:%M:%S.%f',
    '%Y-%m-%d %H:%M:%S:%f',
    '%d.%m.%Y %H:%M',
    '%d.%m.%Y %H.%M',
    '%Y-%m-%dT%H:%M:%SZ',
    '%Y-%m-%dT%H:%M:%S.%fZ',
    '%Y-%m-%dT%H:%M:%S.%f0Z',
    '%Y-%m-%dT%H:%M:%S',
    '%Y-%m-%dT%H:%M:%S.%f',
    '%Y-%m-%dT%H:%M',
    '%b %d %Y at %H:%M',
    '%b %d %Y at %H:%M:%S',
    '%B %d %Y at %H:%M',
    '%B %d %Y at %H:%M:%S',
    '%H:%M %d-%b-%Y',
)

DATE_FORMATS_DAY_FIRST = list(DATE_FORMATS)
DATE_FORMATS_DAY_FIRST.extend([
    '%d-%m-%Y',
    '%d.%m.%Y',
    '%d.%m.%y',
    '%d/%m/%Y',
    '%d/%m/%y',
    '%d/%m/%Y %H:%M:%S',
])

DATE_FORMATS_MONTH_FIRST = list(DATE_FORMATS)
DATE_FORMATS_MONTH_FIRST.extend([
    '%m-%d-%Y',
    '%m.%d.%Y',
    '%m/%d/%Y',
    '%m/%d/%y',
    '%m/%d/%Y %H:%M:%S',
])

PACKED_CODES_RE = r"}\('(.+)',(\d+),(\d+),'([^']+)'\.split\('\|'\)"
JSON_LD_RE = r'(?is)<script[^>]+type=(["\']?)application/ld\+json\1[^>]*>(?P<json_ld>.+?)</script>'

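
# Illustrative sketch (added for reference, sample date string is made up): the
# format tables above are intended to be tried in order against a date string
# until one parses; the unified_strdate/unified_timestamp helpers later in this
# file work along these lines. This helper only demonstrates the idea.
def _date_formats_example(date_str='2021/11/05 17:30'):
    for fmt in DATE_FORMATS:
        try:
            return datetime.datetime.strptime(date_str, fmt)
        except ValueError:
            continue
    return None
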

def preferredencoding():
    """Get preferred encoding.

    Returns the best encoding scheme for the system, based on
    locale.getpreferredencoding() and some further tweaks.
    """
    try:
        pref = locale.getpreferredencoding()
        'TEST'.encode(pref)
    except Exception:
        pref = 'UTF-8'

    return pref


def write_json_file(obj, fn):
    """ Encode obj as JSON and write it to fn, atomically if possible """

    fn = encodeFilename(fn)
    if sys.version_info < (3, 0) and sys.platform != 'win32':
        encoding = get_filesystem_encoding()
        # os.path.basename returns a bytes object, but NamedTemporaryFile
        # will fail if the filename contains non ascii characters unless we
        # use a unicode object
        path_basename = lambda f: os.path.basename(fn).decode(encoding)
        # the same for os.path.dirname
        path_dirname = lambda f: os.path.dirname(fn).decode(encoding)
    else:
        path_basename = os.path.basename
        path_dirname = os.path.dirname

    args = {
        'suffix': '.tmp',
        'prefix': path_basename(fn) + '.',
        'dir': path_dirname(fn),
        'delete': False,
    }

    # In Python 2.x, json.dump expects a bytestream.
    # In Python 3.x, it writes to a character stream
    if sys.version_info < (3, 0):
        args['mode'] = 'wb'
    else:
        args.update({
            'mode': 'w',
            'encoding': 'utf-8',
        })

    tf = tempfile.NamedTemporaryFile(**compat_kwargs(args))

    try:
        with tf:
            json.dump(obj, tf, ensure_ascii=False)
        if sys.platform == 'win32':
            # Need to remove existing file on Windows, else os.rename raises
            # WindowsError or FileExistsError.
            try:
                os.unlink(fn)
            except OSError:
                pass
        try:
            mask = os.umask(0)
            os.umask(mask)
            os.chmod(tf.name, 0o666 & ~mask)
        except OSError:
            pass
        os.rename(tf.name, fn)
    except Exception:
        try:
            os.remove(tf.name)
        except OSError:
            pass
        raise

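
# Illustrative sketch (the path below is hypothetical): write_json_file writes
# to a temporary file in the target directory and renames it into place, so
# readers never observe a half-written file.
def _write_json_file_example():
    write_json_file({'id': 'abc', 'title': 'example'}, 'info.json')
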
if sys.version_info >= (2, 7):
    def find_xpath_attr(node, xpath, key, val=None):
        """ Find the xpath xpath[@key=val] """
        assert re.match(r'^[a-zA-Z_-]+$', key)
        expr = xpath + ('[@%s]' % key if val is None else "[@%s='%s']" % (key, val))
        return node.find(expr)
else:
    def find_xpath_attr(node, xpath, key, val=None):
        for f in node.findall(compat_xpath(xpath)):
            if key not in f.attrib:
                continue
            if val is None or f.attrib.get(key) == val:
                return f
        return None

# On python2.6 the xml.etree.ElementTree.Element methods don't support
# the namespace parameter


def xpath_with_ns(path, ns_map):
    components = [c.split(':') for c in path.split('/')]
    replaced = []
    for c in components:
        if len(c) == 1:
            replaced.append(c[0])
        else:
            ns, tag = c
            replaced.append('{%s}%s' % (ns_map[ns], tag))
    return '/'.join(replaced)

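
# Illustrative sketch (the namespace URI is just a sample value): xpath_with_ns
# expands prefixed path components into ElementTree's {namespace}tag form.
def _xpath_with_ns_example():
    _x = lambda p: xpath_with_ns(p, {'smil': 'http://www.w3.org/2001/SMIL20/Language'})
    return _x('.//smil:video')  # './/{http://www.w3.org/2001/SMIL20/Language}video'
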

def xpath_element(node, xpath, name=None, fatal=False, default=NO_DEFAULT):
    def _find_xpath(xpath):
        return node.find(compat_xpath(xpath))

    if isinstance(xpath, (str, compat_str)):
        n = _find_xpath(xpath)
    else:
        for xp in xpath:
            n = _find_xpath(xp)
            if n is not None:
                break

    if n is None:
        if default is not NO_DEFAULT:
            return default
        elif fatal:
            name = xpath if name is None else name
            raise ExtractorError('Could not find XML element %s' % name)
        else:
            return None
    return n


def xpath_text(node, xpath, name=None, fatal=False, default=NO_DEFAULT):
    n = xpath_element(node, xpath, name, fatal=fatal, default=default)
    if n is None or n == default:
        return n
    if n.text is None:
        if default is not NO_DEFAULT:
            return default
        elif fatal:
            name = xpath if name is None else name
            raise ExtractorError('Could not find XML element\'s text %s' % name)
        else:
            return None
    return n.text


def xpath_attr(node, xpath, key, name=None, fatal=False, default=NO_DEFAULT):
    n = find_xpath_attr(node, xpath, key)
    if n is None:
        if default is not NO_DEFAULT:
            return default
        elif fatal:
            name = '%s[@%s]' % (xpath, key) if name is None else name
            raise ExtractorError('Could not find XML attribute %s' % name)
        else:
            return None
    return n.attrib[key]

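
# Illustrative sketch (the XML snippet is made up): typical use of the xpath_*
# helpers, with `default` providing a fallback instead of raising.
def _xpath_helpers_example():
    doc = compat_etree_fromstring('<root><media url="http://example.com/v.mp4">Title</media></root>')
    title = xpath_text(doc, './media', 'title', fatal=True)     # 'Title'
    url = xpath_attr(doc, './media', 'url', default=None)       # 'http://example.com/v.mp4'
    missing = xpath_text(doc, './missing', default='fallback')  # 'fallback'
    return title, url, missing
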
def get_element_by_id(id, html):
    """Return the content of the tag with the specified ID in the passed HTML document"""
    return get_element_by_attribute('id', id, html)


def get_element_html_by_id(id, html):
    """Return the html of the tag with the specified ID in the passed HTML document"""
    return get_element_html_by_attribute('id', id, html)


def get_element_by_class(class_name, html):
    """Return the content of the first tag with the specified class in the passed HTML document"""
    retval = get_elements_by_class(class_name, html)
    return retval[0] if retval else None


def get_element_html_by_class(class_name, html):
    """Return the html of the first tag with the specified class in the passed HTML document"""
    retval = get_elements_html_by_class(class_name, html)
    return retval[0] if retval else None


def get_element_by_attribute(attribute, value, html, escape_value=True):
    retval = get_elements_by_attribute(attribute, value, html, escape_value)
    return retval[0] if retval else None


def get_element_html_by_attribute(attribute, value, html, escape_value=True):
    retval = get_elements_html_by_attribute(attribute, value, html, escape_value)
    return retval[0] if retval else None


def get_elements_by_class(class_name, html):
    """Return the content of all tags with the specified class in the passed HTML document as a list"""
    return get_elements_by_attribute(
        'class', r'[^\'"]*\b%s\b[^\'"]*' % re.escape(class_name),
        html, escape_value=False)


def get_elements_html_by_class(class_name, html):
    """Return the html of all tags with the specified class in the passed HTML document as a list"""
    return get_elements_html_by_attribute(
        'class', r'[^\'"]*\b%s\b[^\'"]*' % re.escape(class_name),
        html, escape_value=False)


def get_elements_by_attribute(*args, **kwargs):
    """Return the content of the tag with the specified attribute in the passed HTML document"""
    return [content for content, _ in get_elements_text_and_html_by_attribute(*args, **kwargs)]


def get_elements_html_by_attribute(*args, **kwargs):
    """Return the html of the tag with the specified attribute in the passed HTML document"""
    return [whole for _, whole in get_elements_text_and_html_by_attribute(*args, **kwargs)]


def get_elements_text_and_html_by_attribute(attribute, value, html, escape_value=True):
    """
    Return the text (content) and the html (whole) of the tag with the specified
    attribute in the passed HTML document
    """

    value_quote_optional = '' if re.match(r'''[\s"'`=<>]''', value) else '?'

    value = re.escape(value) if escape_value else value

    partial_element_re = r'''(?x)
        <(?P<tag>[a-zA-Z0-9:._-]+)
         (?:\s(?:[^>"']|"[^"]*"|'[^']*')*)?
         \s%(attribute)s\s*=\s*(?P<_q>['"]%(vqo)s)(?-x:%(value)s)(?P=_q)
        ''' % {'attribute': re.escape(attribute), 'value': value, 'vqo': value_quote_optional}

    for m in re.finditer(partial_element_re, html):
        content, whole = get_element_text_and_html_by_tag(m.group('tag'), html[m.start():])

        yield (
            unescapeHTML(re.sub(r'^(?P<q>["\'])(?P<content>.*)(?P=q)$', r'\g<content>', content, flags=re.DOTALL)),
            whole
        )


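
# Illustrative sketch (the markup is a made-up sample): the get_element(s)_by_*
# helpers pull tag content out of raw HTML without building a full DOM, and
# entities in the extracted content are unescaped.
def _get_element_by_class_example():
    html = '<div class="title main">Video &amp; more</div>'
    return get_element_by_class('title', html)  # 'Video & more'
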
class HTMLBreakOnClosingTagParser(compat_HTMLParser):
    """
    HTML parser which raises HTMLBreakOnClosingTagException upon reaching the
    closing tag for the first opening tag it has encountered, and can be used
    as a context manager
    """

    class HTMLBreakOnClosingTagException(Exception):
        pass

    def __init__(self):
        self.tagstack = collections.deque()
        compat_HTMLParser.__init__(self)

    def __enter__(self):
        return self

    def __exit__(self, *_):
        self.close()

    def close(self):
        # handle_endtag does not return upon raising HTMLBreakOnClosingTagException,
        # so data remains buffered; we no longer have any interest in it, thus
        # override this method to discard it
        pass

    def handle_starttag(self, tag, _):
        self.tagstack.append(tag)

    def handle_endtag(self, tag):
        if not self.tagstack:
            raise compat_HTMLParseError('no tags in the stack')
        while self.tagstack:
            inner_tag = self.tagstack.pop()
            if inner_tag == tag:
                break
        else:
            raise compat_HTMLParseError(f'matching opening tag for closing {tag} tag not found')
        if not self.tagstack:
            raise self.HTMLBreakOnClosingTagException()


def get_element_text_and_html_by_tag(tag, html):
    """
    For the first element with the specified tag in the passed HTML document
    return its content (text) and the whole element (html)
    """
    def find_or_raise(haystack, needle, exc):
        try:
            return haystack.index(needle)
        except ValueError:
            raise exc
    closing_tag = f'</{tag}>'
    whole_start = find_or_raise(
        html, f'<{tag}', compat_HTMLParseError(f'opening {tag} tag not found'))
    content_start = find_or_raise(
        html[whole_start:], '>', compat_HTMLParseError(f'malformed opening {tag} tag'))
    content_start += whole_start + 1
    with HTMLBreakOnClosingTagParser() as parser:
        parser.feed(html[whole_start:content_start])
        if not parser.tagstack or parser.tagstack[0] != tag:
            raise compat_HTMLParseError(f'parser did not match opening {tag} tag')
        offset = content_start
        while offset < len(html):
            next_closing_tag_start = find_or_raise(
                html[offset:], closing_tag,
                compat_HTMLParseError(f'closing {tag} tag not found'))
            next_closing_tag_end = next_closing_tag_start + len(closing_tag)
            try:
                parser.feed(html[offset:offset + next_closing_tag_end])
                offset += next_closing_tag_end
            except HTMLBreakOnClosingTagParser.HTMLBreakOnClosingTagException:
                return html[content_start:offset + next_closing_tag_start], \
                    html[whole_start:offset + next_closing_tag_end]
        raise compat_HTMLParseError('unexpected end of html')


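
# Illustrative sketch (the markup is a made-up sample): unlike a plain regex,
# get_element_text_and_html_by_tag tracks nesting, so an inner tag of the same
# name does not terminate the match early.
def _element_by_tag_example():
    html = '<div id="a">outer <div>inner</div> tail</div>'
    text, whole = get_element_text_and_html_by_tag('div', html)
    return text  # 'outer <div>inner</div> tail'
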
class HTMLAttributeParser(compat_HTMLParser):
    """Trivial HTML parser to gather the attributes for a single element"""

    def __init__(self):
        self.attrs = {}
        compat_HTMLParser.__init__(self)

    def handle_starttag(self, tag, attrs):
        self.attrs = dict(attrs)


class HTMLListAttrsParser(compat_HTMLParser):
    """HTML parser to gather the attributes for the elements of a list"""

    def __init__(self):
        compat_HTMLParser.__init__(self)
        self.items = []
        self._level = 0

    def handle_starttag(self, tag, attrs):
        if tag == 'li' and self._level == 0:
            self.items.append(dict(attrs))
        self._level += 1

    def handle_endtag(self, tag):
        self._level -= 1


def extract_attributes(html_element):
    """Given a string for an HTML element such as
    <el
         a="foo" B="bar" c="&98;az" d=boz
         empty= noval entity="&amp;"
         sq='"' dq="'"
    >
    Decode and return a dictionary of attributes.
    {
        'a': 'foo', 'b': 'bar', c: 'baz', d: 'boz',
        'empty': '', 'noval': None, 'entity': '&',
        'sq': '"', 'dq': '\''
    }.
    NB HTMLParser is stricter in Python 2.6 & 3.2 than in later versions,
    but the cases in the unit test will work for all of 2.6, 2.7, 3.2-3.5.
    """
    parser = HTMLAttributeParser()
    try:
        parser.feed(html_element)
        parser.close()
    # Older Python may throw HTMLParseError in case of malformed HTML
    except compat_HTMLParseError:
        pass
    return parser.attrs


def parse_list(webpage):
    """Given a string for a series of HTML <li> elements,
    return a list of dictionaries of their attributes"""
    parser = HTMLListAttrsParser()
    parser.feed(webpage)
    parser.close()
    return parser.items


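
# Illustrative sketch (the start tag is a made-up sample): extract_attributes
# on a single start tag; values are unescaped and bare attributes map to None,
# as described in the docstring above.
def _extract_attributes_example():
    return extract_attributes('<a href="/watch?v=abc" data-id=\'42\' hidden>')
    # {'href': '/watch?v=abc', 'data-id': '42', 'hidden': None}
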
def clean_html(html):
    """Clean an HTML snippet into a readable string"""

    if html is None:  # Convenience for sanitizing descriptions etc.
        return html

    # Newline vs <br />
    html = html.replace('\n', ' ')
    html = re.sub(r'(?u)\s*<\s*br\s*/?\s*>\s*', '\n', html)
    html = re.sub(r'(?u)<\s*/\s*p\s*>\s*<\s*p[^>]*>', '\n', html)
    # Strip html tags
    html = re.sub('<.*?>', '', html)
    # Replace html entities
    html = unescapeHTML(html)
    return html.strip()


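
# Illustrative sketch (the markup is a made-up sample): clean_html turns a
# snippet into readable text, keeping <br>/<p> boundaries as newlines.
def _clean_html_example():
    return clean_html('<p>First line<br/>Second &amp; last</p>')
    # 'First line\nSecond & last'
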
def sanitize_open(filename, open_mode):
    """Try to open the given filename, and slightly tweak it if this fails.

    Attempts to open the given filename. If this fails, it tries to change
    the filename slightly, step by step, until it's either able to open it
    or it fails and raises a final exception, like the standard open()
    function.

    It returns the tuple (stream, definitive_file_name).
    """
    try:
        if filename == '-':
            if sys.platform == 'win32':
                import msvcrt
                msvcrt.setmode(sys.stdout.fileno(), os.O_BINARY)
            return (sys.stdout.buffer if hasattr(sys.stdout, 'buffer') else sys.stdout, filename)
        stream = open(encodeFilename(filename), open_mode)
        return (stream, filename)
    except (IOError, OSError) as err:
        if err.errno in (errno.EACCES,):
            raise

        # In case of error, try to remove win32 forbidden chars
        alt_filename = sanitize_path(filename)
        if alt_filename == filename:
            raise
        else:
            # An exception here should be caught in the caller
            stream = open(encodeFilename(alt_filename), open_mode)
            return (stream, alt_filename)


def timeconvert(timestr):
    """Convert RFC 2822 defined time string into system timestamp"""
    timestamp = None
    timetuple = email.utils.parsedate_tz(timestr)
    if timetuple is not None:
        timestamp = email.utils.mktime_tz(timetuple)
    return timestamp


def sanitize_filename(s, restricted=False, is_id=False):
    """Sanitizes a string so it could be used as part of a filename.
    If restricted is set, use a stricter subset of allowed characters.
    Set is_id if this is not an arbitrary string, but an ID that should be kept
    if possible.
    """
    def replace_insane(char):
        if restricted and char in ACCENT_CHARS:
            return ACCENT_CHARS[char]
        elif not restricted and char == '\n':
            return ' '
        elif char == '?' or ord(char) < 32 or ord(char) == 127:
            return ''
        elif char == '"':
            return '' if restricted else '\''
        elif char == ':':
            return '_-' if restricted else ' -'
        elif char in '\\/|*<>':
            return '_'
        if restricted and (char in '!&\'()[]{}$;`^,#' or char.isspace()):
            return '_'
        if restricted and ord(char) > 127:
            return '_'
        return char

    if s == '':
        return ''
    # Handle timestamps
    s = re.sub(r'[0-9]+(?::[0-9]+)+', lambda m: m.group(0).replace(':', '_'), s)
    result = ''.join(map(replace_insane, s))
    if not is_id:
        while '__' in result:
            result = result.replace('__', '_')
        result = result.strip('_')
        # Common case of "Foreign band name - English song title"
        if restricted and result.startswith('-_'):
            result = result[2:]
        if result.startswith('-'):
            result = '_' + result[len('-'):]
        result = result.lstrip('.')
        if not result:
            result = '_'
    return result


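
# Illustrative sketch (the input string is a made-up sample): restricted mode
# maps accented characters via ACCENT_CHARS and replaces most punctuation and
# whitespace with '_'.
def _sanitize_filename_example():
    return (
        sanitize_filename('Jürgen: a/b "test"'),                   # "Jürgen - a_b 'test'"
        sanitize_filename('Jürgen: a/b "test"', restricted=True),  # 'Jurgen_-_a_b_test'
    )
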
def sanitize_path(s, force=False):
    """Sanitizes and normalizes path on Windows"""
    if sys.platform == 'win32':
        force = False
        drive_or_unc, _ = os.path.splitdrive(s)
        if sys.version_info < (2, 7) and not drive_or_unc:
            drive_or_unc, _ = os.path.splitunc(s)
    elif force:
        drive_or_unc = ''
    else:
        return s

    norm_path = os.path.normpath(remove_start(s, drive_or_unc)).split(os.path.sep)
    if drive_or_unc:
        norm_path.pop(0)
    sanitized_path = [
        path_part if path_part in ['.', '..'] else re.sub(r'(?:[/<>:"\|\\?\*]|[\s.]$)', '#', path_part)
        for path_part in norm_path]
    if drive_or_unc:
        sanitized_path.insert(0, drive_or_unc + os.path.sep)
    elif force and s[0] == os.path.sep:
        sanitized_path.insert(0, os.path.sep)
    return os.path.join(*sanitized_path)


17bcc626 2301def sanitize_url(url):
befa4708
S
2302 # Prepend protocol-less URLs with `http:` scheme in order to mitigate
2303 # the number of unwanted failures due to missing protocol
2304 if url.startswith('//'):
2305 return 'http:%s' % url
2306 # Fix some common typos seen so far
2307 COMMON_TYPOS = (
067aa17e 2308 # https://github.com/ytdl-org/youtube-dl/issues/15649
befa4708
S
2309 (r'^httpss://', r'https://'),
2310 # https://bx1.be/lives/direct-tv/
2311 (r'^rmtp([es]?)://', r'rtmp\1://'),
2312 )
2313 for mistake, fixup in COMMON_TYPOS:
2314 if re.match(mistake, url):
2315 return re.sub(mistake, fixup, url)
bc6b9bcd 2316 return url
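
# Editor's sketch (not part of the original module, never called by yt-dlp):
# the scheme fix-ups performed by sanitize_url(), with editor-chosen URLs.
def _sanitize_url_example():
    assert sanitize_url('//example.com/video') == 'http://example.com/video'
    assert sanitize_url('httpss://example.com/video') == 'https://example.com/video'
    assert sanitize_url('rmtp://example.com/live') == 'rtmp://example.com/live'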
17bcc626
S
2317
2318
5435dcf9
HH
2319def extract_basic_auth(url):
2320 parts = compat_urlparse.urlsplit(url)
2321 if parts.username is None:
2322 return url, None
2323 url = compat_urlparse.urlunsplit(parts._replace(netloc=(
2324 parts.hostname if parts.port is None
2325 else '%s:%d' % (parts.hostname, parts.port))))
2326 auth_payload = base64.b64encode(
2327 ('%s:%s' % (parts.username, parts.password or '')).encode('utf-8'))
2328 return url, 'Basic ' + auth_payload.decode('utf-8')
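
# Editor's sketch (not part of the original module, never called by yt-dlp):
# how credentials embedded in a URL become a Basic auth header value.
def _extract_basic_auth_example():
    assert extract_basic_auth('http://user:pass@example.com/x') == (
        'http://example.com/x', 'Basic dXNlcjpwYXNz')
    # URLs without credentials are passed through unchanged
    assert extract_basic_auth('http://example.com/x') == ('http://example.com/x', None)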
2329
2330
67dda517 2331def sanitized_Request(url, *args, **kwargs):
bc6b9bcd 2332 url, auth_header = extract_basic_auth(escape_url(sanitize_url(url)))
5435dcf9
HH
2333 if auth_header is not None:
2334 headers = args[1] if len(args) >= 2 else kwargs.setdefault('headers', {})
2335 headers['Authorization'] = auth_header
2336 return compat_urllib_request.Request(url, *args, **kwargs)
67dda517
S
2337
2338
51098426
S
2339def expand_path(s):
2340 """Expand shell variables and ~"""
2341 return os.path.expandvars(compat_expanduser(s))
2342
2343
d77c3dfd 2344def orderedSet(iterable):
59ae15a5
PH
2345 """ Remove all duplicates from the input iterable """
2346 res = []
2347 for el in iterable:
2348 if el not in res:
2349 res.append(el)
2350 return res
d77c3dfd 2351
912b38b4 2352
55b2f099 2353def _htmlentity_transform(entity_with_semicolon):
4e408e47 2354 """Transforms an HTML entity to a character."""
55b2f099
YCH
2355 entity = entity_with_semicolon[:-1]
2356
4e408e47
PH
2357 # Known non-numeric HTML entity
2358 if entity in compat_html_entities.name2codepoint:
2359 return compat_chr(compat_html_entities.name2codepoint[entity])
2360
55b2f099
YCH
2361 # TODO: HTML5 allows entities without a semicolon. For example,
2362 # '&Eacuteric' should be decoded as 'Éric'.
2363 if entity_with_semicolon in compat_html_entities_html5:
2364 return compat_html_entities_html5[entity_with_semicolon]
2365
91757b0f 2366 mobj = re.match(r'#(x[0-9a-fA-F]+|[0-9]+)', entity)
4e408e47
PH
2367 if mobj is not None:
2368 numstr = mobj.group(1)
28e614de 2369 if numstr.startswith('x'):
4e408e47 2370 base = 16
28e614de 2371 numstr = '0%s' % numstr
4e408e47
PH
2372 else:
2373 base = 10
067aa17e 2374 # See https://github.com/ytdl-org/youtube-dl/issues/7518
7aefc49c
S
2375 try:
2376 return compat_chr(int(numstr, base))
2377 except ValueError:
2378 pass
4e408e47
PH
2379
2380 # Unknown entity in name, return its literal representation
7a3f0c00 2381 return '&%s;' % entity
4e408e47
PH
2382
2383
d77c3dfd 2384def unescapeHTML(s):
912b38b4
PH
2385 if s is None:
2386 return None
2387 assert type(s) == compat_str
d77c3dfd 2388
4e408e47 2389 return re.sub(
95f3f7c2 2390 r'&([^&;]+;)', lambda m: _htmlentity_transform(m.group(1)), s)
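
# Editor's sketch (not part of the original module, never called by yt-dlp):
# named and numeric entity handling in unescapeHTML(), with editor-chosen input.
def _unescapeHTML_example():
    assert unescapeHTML('Ben &amp; Jerry&#39;s') == "Ben & Jerry's"
    assert unescapeHTML('&eacute;') == 'é'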
d77c3dfd 2391
8bf48f23 2392
cdb19aa4 2393def escapeHTML(text):
2394 return (
2395 text
2396 .replace('&', '&amp;')
2397 .replace('<', '&lt;')
2398 .replace('>', '&gt;')
2399 .replace('"', '&quot;')
2400 .replace("'", '&#39;')
2401 )
2402
2403
f5b1bca9 2404def process_communicate_or_kill(p, *args, **kwargs):
2405 try:
2406 return p.communicate(*args, **kwargs)
2407 except BaseException: # Including KeyboardInterrupt
2408 p.kill()
2409 p.wait()
2410 raise
2411
2412
d3c93ec2 2413class Popen(subprocess.Popen):
2414 if sys.platform == 'win32':
2415 _startupinfo = subprocess.STARTUPINFO()
2416 _startupinfo.dwFlags |= subprocess.STARTF_USESHOWWINDOW
2417 else:
2418 _startupinfo = None
2419
2420 def __init__(self, *args, **kwargs):
2421 super(Popen, self).__init__(*args, **kwargs, startupinfo=self._startupinfo)
2422
2423 def communicate_or_kill(self, *args, **kwargs):
2424 return process_communicate_or_kill(self, *args, **kwargs)
2425
2426
aa49acd1
S
2427def get_subprocess_encoding():
2428 if sys.platform == 'win32' and sys.getwindowsversion()[0] >= 5:
2429 # For subprocess calls, encode with locale encoding
2430 # Refer to http://stackoverflow.com/a/9951851/35070
2431 encoding = preferredencoding()
2432 else:
2433 encoding = sys.getfilesystemencoding()
2434 if encoding is None:
2435 encoding = 'utf-8'
2436 return encoding
2437
2438
8bf48f23 2439def encodeFilename(s, for_subprocess=False):
59ae15a5
PH
2440 """
2441 @param s The name of the file
2442 """
d77c3dfd 2443
8bf48f23 2444 assert type(s) == compat_str
d77c3dfd 2445
59ae15a5
PH
2446 # Python 3 has a Unicode API
2447 if sys.version_info >= (3, 0):
2448 return s
0f00efed 2449
aa49acd1
S
2450 # Pass '' directly to use Unicode APIs on Windows 2000 and up
2451 # (Detecting Windows NT 4 is tricky because 'major >= 4' would
2452 # match Windows 9x series as well. Besides, NT 4 is obsolete.)
2453 if not for_subprocess and sys.platform == 'win32' and sys.getwindowsversion()[0] >= 5:
2454 return s
2455
8ee239e9
YCH
2456 # Jython assumes filenames are Unicode strings though reported as Python 2.x compatible
2457 if sys.platform.startswith('java'):
2458 return s
2459
aa49acd1
S
2460 return s.encode(get_subprocess_encoding(), 'ignore')
2461
2462
2463def decodeFilename(b, for_subprocess=False):
2464
2465 if sys.version_info >= (3, 0):
2466 return b
2467
2468 if not isinstance(b, bytes):
2469 return b
2470
2471 return b.decode(get_subprocess_encoding(), 'ignore')
8bf48f23 2472
f07b74fc
PH
2473
2474def encodeArgument(s):
2475 if not isinstance(s, compat_str):
2476 # Legacy code that uses byte strings
2477 # Uncomment the following line after fixing all post processors
7af808a5 2478 # assert False, 'Internal error: %r should be of type %r, is %r' % (s, compat_str, type(s))
f07b74fc
PH
2479 s = s.decode('ascii')
2480 return encodeFilename(s, True)
2481
2482
aa49acd1
S
2483def decodeArgument(b):
2484 return decodeFilename(b, True)
2485
2486
8271226a
PH
2487def decodeOption(optval):
2488 if optval is None:
2489 return optval
2490 if isinstance(optval, bytes):
2491 optval = optval.decode(preferredencoding())
2492
2493 assert isinstance(optval, compat_str)
2494 return optval
1c256f70 2495
5f6a1245 2496
aa7785f8 2497_timetuple = collections.namedtuple('Time', ('hours', 'minutes', 'seconds', 'milliseconds'))
2498
2499
2500def timetuple_from_msec(msec):
2501 secs, msec = divmod(msec, 1000)
2502 mins, secs = divmod(secs, 60)
2503 hrs, mins = divmod(mins, 60)
2504 return _timetuple(hrs, mins, secs, msec)
2505
2506
cdb19aa4 2507def formatSeconds(secs, delim=':', msec=False):
aa7785f8 2508 time = timetuple_from_msec(secs * 1000)
2509 if time.hours:
2510 ret = '%d%s%02d%s%02d' % (time.hours, delim, time.minutes, delim, time.seconds)
2511 elif time.minutes:
2512 ret = '%d%s%02d' % (time.minutes, delim, time.seconds)
4539dd30 2513 else:
aa7785f8 2514 ret = '%d' % time.seconds
2515 return '%s.%03d' % (ret, time.milliseconds) if msec else ret
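
# Editor's sketch (not part of the original module, never called by yt-dlp):
# timetuple_from_msec()/formatSeconds() behaviour on editor-chosen values.
def _formatSeconds_example():
    assert timetuple_from_msec(123456) == (0, 2, 3, 456)
    assert formatSeconds(3661) == '1:01:01'
    assert formatSeconds(61, msec=True) == '1:01.000'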
4539dd30 2516
a0ddb8a2 2517
77562778 2518def _ssl_load_windows_store_certs(ssl_context, storename):
2519 # Code adapted from _load_windows_store_certs in https://github.com/python/cpython/blob/main/Lib/ssl.py
2520 try:
2521 certs = [cert for cert, encoding, trust in ssl.enum_certificates(storename)
2522 if encoding == 'x509_asn' and (
2523 trust is True or ssl.Purpose.SERVER_AUTH.oid in trust)]
2524 except PermissionError:
2525 return
2526 for cert in certs:
a2366922 2527 try:
77562778 2528 ssl_context.load_verify_locations(cadata=cert)
2529 except ssl.SSLError:
a2366922
PH
2530 pass
2531
77562778 2532
2533def make_HTTPS_handler(params, **kwargs):
2534 opts_check_certificate = not params.get('nocheckcertificate')
2535 context = ssl.SSLContext(ssl.PROTOCOL_TLS_CLIENT)
2536 context.check_hostname = opts_check_certificate
2537 context.verify_mode = ssl.CERT_REQUIRED if opts_check_certificate else ssl.CERT_NONE
2538 if opts_check_certificate:
4e3d1898 2539 try:
2540 context.load_default_certs()
2541 # Work around the issue in load_default_certs when there are bad certificates. See:
2542 # https://github.com/yt-dlp/yt-dlp/issues/1060,
2543 # https://bugs.python.org/issue35665, https://bugs.python.org/issue45312
2544 except ssl.SSLError:
2545 # enum_certificates is not present in mingw python. See https://github.com/yt-dlp/yt-dlp/issues/1151
2546 if sys.platform == 'win32' and hasattr(ssl, 'enum_certificates'):
2547 # Create a new context to discard any certificates that were already loaded
2548 context = ssl.SSLContext(ssl.PROTOCOL_TLS_CLIENT)
2549 context.check_hostname, context.verify_mode = True, ssl.CERT_REQUIRED
2550 for storename in ('CA', 'ROOT'):
2551 _ssl_load_windows_store_certs(context, storename)
2552 context.set_default_verify_paths()
77562778 2553 return YoutubeDLHTTPSHandler(params, context=context, **kwargs)
ea6d901e 2554
732ea2f0 2555
5873d4cc 2556def bug_reports_message(before=';'):
08f2a92c 2557 if ytdl_is_updateable():
7a5c1cfe 2558 update_cmd = 'type yt-dlp -U to update'
08f2a92c 2559 else:
7a5c1cfe 2560 update_cmd = 'see https://github.com/yt-dlp/yt-dlp on how to update'
5873d4cc 2561 msg = 'please report this issue on https://github.com/yt-dlp/yt-dlp .'
08f2a92c 2562 msg += ' Make sure you are using the latest version; %s.' % update_cmd
7a5c1cfe 2563 msg += ' Be sure to call yt-dlp with the --verbose flag and include its complete output.'
5873d4cc
F
2564
2565 before = before.rstrip()
2566 if not before or before.endswith(('.', '!', '?')):
2567 msg = msg[0].title() + msg[1:]
2568
2569 return (before + ' ' if before else '') + msg
08f2a92c
JMF
2570
2571
bf5b9d85
PM
2572class YoutubeDLError(Exception):
2573 """Base exception for YoutubeDL errors."""
aa9369a2 2574 msg = None
2575
2576 def __init__(self, msg=None):
2577 if msg is not None:
2578 self.msg = msg
2579 elif self.msg is None:
2580 self.msg = type(self).__name__
2581 super().__init__(self.msg)
bf5b9d85
PM
2582
2583
3158150c 2584network_exceptions = [compat_urllib_error.URLError, compat_http_client.HTTPException, socket.error]
2585if hasattr(ssl, 'CertificateError'):
2586 network_exceptions.append(ssl.CertificateError)
2587network_exceptions = tuple(network_exceptions)
2588
2589
bf5b9d85 2590class ExtractorError(YoutubeDLError):
1c256f70 2591 """Error during info extraction."""
5f6a1245 2592
1151c407 2593 def __init__(self, msg, tb=None, expected=False, cause=None, video_id=None, ie=None):
9a82b238 2594 """ tb, if given, is the original traceback (so that it can be printed out).
7a5c1cfe 2595 If expected is set, this is a normal error message and most likely not a bug in yt-dlp.
9a82b238 2596 """
3158150c 2597 if sys.exc_info()[0] in network_exceptions:
9a82b238 2598 expected = True
d5979c5d 2599
526d74ec 2600 self.msg = str(msg)
1c256f70 2601 self.traceback = tb
1151c407 2602 self.expected = expected
2eabb802 2603 self.cause = cause
d11271dd 2604 self.video_id = video_id
1151c407 2605 self.ie = ie
2606 self.exc_info = sys.exc_info() # preserve original exception
2607
2608 super(ExtractorError, self).__init__(''.join((
2609 format_field(ie, template='[%s] '),
2610 format_field(video_id, template='%s: '),
526d74ec 2611 self.msg,
1151c407 2612 format_field(cause, template=' (caused by %r)'),
2613 '' if expected else bug_reports_message())))
1c256f70 2614
01951dda
PH
2615 def format_traceback(self):
2616 if self.traceback is None:
2617 return None
28e614de 2618 return ''.join(traceback.format_tb(self.traceback))
01951dda 2619
1c256f70 2620
416c7fcb
PH
2621class UnsupportedError(ExtractorError):
2622 def __init__(self, url):
2623 super(UnsupportedError, self).__init__(
2624 'Unsupported URL: %s' % url, expected=True)
2625 self.url = url
2626
2627
55b3e45b
JMF
2628class RegexNotFoundError(ExtractorError):
2629 """Error when a regex didn't match"""
2630 pass
2631
2632
773f291d
S
2633class GeoRestrictedError(ExtractorError):
2634 """Geographic restriction Error exception.
2635
2636 This exception may be thrown when a video is not available from your
2637 geographic location due to geographic restrictions imposed by a website.
2638 """
b6e0c7d2 2639
0db3bae8 2640 def __init__(self, msg, countries=None, **kwargs):
2641 kwargs['expected'] = True
2642 super(GeoRestrictedError, self).__init__(msg, **kwargs)
773f291d
S
2643 self.countries = countries
2644
2645
bf5b9d85 2646class DownloadError(YoutubeDLError):
59ae15a5 2647 """Download Error exception.
d77c3dfd 2648
59ae15a5
PH
2649 This exception may be thrown by FileDownloader objects if they are not
2650 configured to continue on errors. They will contain the appropriate
2651 error message.
2652 """
5f6a1245 2653
8cc83b8d
FV
2654 def __init__(self, msg, exc_info=None):
2655 """ exc_info, if given, is the original exception that caused the trouble (as returned by sys.exc_info()). """
2656 super(DownloadError, self).__init__(msg)
2657 self.exc_info = exc_info
d77c3dfd
FV
2658
2659
498f5606 2660class EntryNotInPlaylist(YoutubeDLError):
2661 """Entry not in playlist exception.
2662
2663 This exception will be thrown by YoutubeDL when a requested entry
2664 is not found in the playlist info_dict
2665 """
aa9369a2 2666 msg = 'Entry not found in info'
498f5606 2667
2668
bf5b9d85 2669class SameFileError(YoutubeDLError):
59ae15a5 2670 """Same File exception.
d77c3dfd 2671
59ae15a5
PH
2672 This exception will be thrown by FileDownloader objects if they detect
2673 multiple files would have to be downloaded to the same file on disk.
2674 """
aa9369a2 2675 msg = 'Fixed output name but more than one file to download'
2676
2677 def __init__(self, filename=None):
2678 if filename is not None:
2679 self.msg += f': {filename}'
2680 super().__init__(self.msg)
d77c3dfd
FV
2681
2682
bf5b9d85 2683class PostProcessingError(YoutubeDLError):
59ae15a5 2684 """Post Processing exception.
d77c3dfd 2685
59ae15a5
PH
2686 This exception may be raised by PostProcessor's .run() method to
2687 indicate an error in the postprocessing task.
2688 """
5f6a1245 2689
5f6a1245 2690
48f79687 2691class DownloadCancelled(YoutubeDLError):
2692 """ Exception raised when the download queue should be interrupted """
2693 msg = 'The download was cancelled'
8b0d7497 2694
8b0d7497 2695
48f79687 2696class ExistingVideoReached(DownloadCancelled):
2697 """ --break-on-existing triggered """
2698 msg = 'Encountered a video that is already in the archive, stopping due to --break-on-existing'
8b0d7497 2699
48f79687 2700
2701class RejectedVideoReached(DownloadCancelled):
2702 """ --break-on-reject triggered """
2703 msg = 'Encountered a video that did not match filter, stopping due to --break-on-reject'
51d9739f 2704
2705
48f79687 2706class MaxDownloadsReached(DownloadCancelled):
59ae15a5 2707 """ --max-downloads limit has been reached. """
48f79687 2708 msg = 'Maximum number of downloads reached, stopping due to --max-downloads'
2709
2710
f2ebc5c7 2711class ReExtractInfo(YoutubeDLError):
2712 """ Video info needs to be re-extracted. """
2713
2714 def __init__(self, msg, expected=False):
2715 super().__init__(msg)
2716 self.expected = expected
2717
2718
2719class ThrottledDownload(ReExtractInfo):
48f79687 2720 """ Download speed below --throttled-rate. """
aa9369a2 2721 msg = 'The download speed is below throttle limit'
d77c3dfd 2722
43b22906 2723 def __init__(self):
2724 super().__init__(self.msg, expected=False)
f2ebc5c7 2725
d77c3dfd 2726
bf5b9d85 2727class UnavailableVideoError(YoutubeDLError):
59ae15a5 2728 """Unavailable Format exception.
d77c3dfd 2729
59ae15a5
PH
2730 This exception will be thrown when a video is requested
2731 in a format that is not available for that video.
2732 """
aa9369a2 2733 msg = 'Unable to download video'
2734
2735 def __init__(self, err=None):
2736 if err is not None:
2737 self.msg += f': {err}'
2738 super().__init__(self.msg)
d77c3dfd
FV
2739
2740
bf5b9d85 2741class ContentTooShortError(YoutubeDLError):
59ae15a5 2742 """Content Too Short exception.
d77c3dfd 2743
59ae15a5
PH
2744 This exception may be raised by FileDownloader objects when a file they
2745 download is too small for what the server announced first, indicating
2746 the connection was probably interrupted.
2747 """
d77c3dfd 2748
59ae15a5 2749 def __init__(self, downloaded, expected):
bf5b9d85
PM
2750 super(ContentTooShortError, self).__init__(
2751 'Downloaded {0} bytes, expected {1} bytes'.format(downloaded, expected)
2752 )
2c7ed247 2753 # Both in bytes
59ae15a5
PH
2754 self.downloaded = downloaded
2755 self.expected = expected
d77c3dfd 2756
5f6a1245 2757
bf5b9d85 2758class XAttrMetadataError(YoutubeDLError):
efa97bdc
YCH
2759 def __init__(self, code=None, msg='Unknown error'):
2760 super(XAttrMetadataError, self).__init__(msg)
2761 self.code = code
bd264412 2762 self.msg = msg
efa97bdc
YCH
2763
2764 # Parsing code and msg
3089bc74 2765 if (self.code in (errno.ENOSPC, errno.EDQUOT)
a0566bbf 2766 or 'No space left' in self.msg or 'Disk quota exceeded' in self.msg):
efa97bdc
YCH
2767 self.reason = 'NO_SPACE'
2768 elif self.code == errno.E2BIG or 'Argument list too long' in self.msg:
2769 self.reason = 'VALUE_TOO_LONG'
2770 else:
2771 self.reason = 'NOT_SUPPORTED'
2772
2773
bf5b9d85 2774class XAttrUnavailableError(YoutubeDLError):
efa97bdc
YCH
2775 pass
2776
2777
c5a59d93 2778def _create_http_connection(ydl_handler, http_class, is_https, *args, **kwargs):
e5e78797
S
2779 # Working around python 2 bug (see http://bugs.python.org/issue17849) by limiting
2780 # expected HTTP responses to meet HTTP/1.0 or later (see also
067aa17e 2781 # https://github.com/ytdl-org/youtube-dl/issues/6727)
e5e78797 2782 if sys.version_info < (3, 0):
65220c3b
S
2783 kwargs['strict'] = True
2784 hc = http_class(*args, **compat_kwargs(kwargs))
be4a824d 2785 source_address = ydl_handler._params.get('source_address')
8959018a 2786
be4a824d 2787 if source_address is not None:
8959018a
AU
2788 # This works around _create_connection() from socket, which tries all
2789 # address data from getaddrinfo(), including IPv6. This filters the result of
2790 # getaddrinfo() based on the source_address value.
2791 # This is based on the cpython socket.create_connection() function.
2792 # https://github.com/python/cpython/blob/master/Lib/socket.py#L691
2793 def _create_connection(address, timeout=socket._GLOBAL_DEFAULT_TIMEOUT, source_address=None):
2794 host, port = address
2795 err = None
2796 addrs = socket.getaddrinfo(host, port, 0, socket.SOCK_STREAM)
9e21e6d9
S
2797 af = socket.AF_INET if '.' in source_address[0] else socket.AF_INET6
2798 ip_addrs = [addr for addr in addrs if addr[0] == af]
2799 if addrs and not ip_addrs:
2800 ip_version = 'v4' if af == socket.AF_INET else 'v6'
2801 raise socket.error(
2802 "No remote IP%s addresses available for connect, can't use '%s' as source address"
2803 % (ip_version, source_address[0]))
8959018a
AU
2804 for res in ip_addrs:
2805 af, socktype, proto, canonname, sa = res
2806 sock = None
2807 try:
2808 sock = socket.socket(af, socktype, proto)
2809 if timeout is not socket._GLOBAL_DEFAULT_TIMEOUT:
2810 sock.settimeout(timeout)
2811 sock.bind(source_address)
2812 sock.connect(sa)
2813 err = None # Explicitly break reference cycle
2814 return sock
2815 except socket.error as _:
2816 err = _
2817 if sock is not None:
2818 sock.close()
2819 if err is not None:
2820 raise err
2821 else:
9e21e6d9
S
2822 raise socket.error('getaddrinfo returns an empty list')
2823 if hasattr(hc, '_create_connection'):
2824 hc._create_connection = _create_connection
be4a824d
PH
2825 sa = (source_address, 0)
2826 if hasattr(hc, 'source_address'): # Python 2.7+
2827 hc.source_address = sa
2828 else: # Python 2.6
2829 def _hc_connect(self, *args, **kwargs):
9e21e6d9 2830 sock = _create_connection(
be4a824d
PH
2831 (self.host, self.port), self.timeout, sa)
2832 if is_https:
d7932313
PH
2833 self.sock = ssl.wrap_socket(
2834 sock, self.key_file, self.cert_file,
2835 ssl_version=ssl.PROTOCOL_TLSv1)
be4a824d
PH
2836 else:
2837 self.sock = sock
2838 hc.connect = functools.partial(_hc_connect, hc)
2839
2840 return hc
2841
2842
87f0e62d 2843def handle_youtubedl_headers(headers):
992fc9d6
YCH
2844 filtered_headers = headers
2845
2846 if 'Youtubedl-no-compression' in filtered_headers:
2847 filtered_headers = dict((k, v) for k, v in filtered_headers.items() if k.lower() != 'accept-encoding')
87f0e62d 2848 del filtered_headers['Youtubedl-no-compression']
87f0e62d 2849
992fc9d6 2850 return filtered_headers
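
# Editor's sketch (not part of the original module, never called by yt-dlp):
# the internal marker header disables compression and is itself stripped.
def _handle_youtubedl_headers_example():
    headers = {'User-Agent': 'test', 'Accept-Encoding': 'gzip',
               'Youtubedl-no-compression': 'True'}
    assert handle_youtubedl_headers(headers) == {'User-Agent': 'test'}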
87f0e62d
YCH
2851
2852
acebc9cd 2853class YoutubeDLHandler(compat_urllib_request.HTTPHandler):
59ae15a5
PH
2854 """Handler for HTTP requests and responses.
2855
2856 This class, when installed with an OpenerDirector, automatically adds
2857 the standard headers to every HTTP request and handles gzipped and
2858 deflated responses from web servers. If compression is to be avoided in
2859 a particular request, the original request in the program code only has
0424ec30 2860 to include the HTTP header "Youtubedl-no-compression", which will be
59ae15a5
PH
2861 removed before making the real request.
2862
2863 Part of this code was copied from:
2864
2865 http://techknack.net/python-urllib2-handlers/
2866
2867 Andrew Rowls, the author of that code, agreed to release it to the
2868 public domain.
2869 """
2870
be4a824d
PH
2871 def __init__(self, params, *args, **kwargs):
2872 compat_urllib_request.HTTPHandler.__init__(self, *args, **kwargs)
2873 self._params = params
2874
2875 def http_open(self, req):
71aff188
YCH
2876 conn_class = compat_http_client.HTTPConnection
2877
2878 socks_proxy = req.headers.get('Ytdl-socks-proxy')
2879 if socks_proxy:
2880 conn_class = make_socks_conn_class(conn_class, socks_proxy)
2881 del req.headers['Ytdl-socks-proxy']
2882
be4a824d 2883 return self.do_open(functools.partial(
71aff188 2884 _create_http_connection, self, conn_class, False),
be4a824d
PH
2885 req)
2886
59ae15a5
PH
2887 @staticmethod
2888 def deflate(data):
fc2119f2 2889 if not data:
2890 return data
59ae15a5
PH
2891 try:
2892 return zlib.decompress(data, -zlib.MAX_WBITS)
2893 except zlib.error:
2894 return zlib.decompress(data)
2895
acebc9cd 2896 def http_request(self, req):
51f267d9
S
2897 # According to RFC 3986, URLs cannot contain non-ASCII characters; however, this is not
2898 # always respected by websites: some tend to give out URLs with non-percent-encoded
2899 # non-ASCII characters (see telemb.py, ard.py [#3412])
2900 # urllib chokes on URLs with non-ASCII characters (see http://bugs.python.org/issue3991)
2901 # To work around aforementioned issue we will replace request's original URL with
2902 # percent-encoded one
2903 # Since redirects are also affected (e.g. http://www.southpark.de/alle-episoden/s18e09)
2904 # the code of this workaround has been moved here from YoutubeDL.urlopen()
2905 url = req.get_full_url()
2906 url_escaped = escape_url(url)
2907
2908 # Substitute URL if any change after escaping
2909 if url != url_escaped:
15d260eb 2910 req = update_Request(req, url=url_escaped)
51f267d9 2911
33ac271b 2912 for h, v in std_headers.items():
3d5f7a39
JK
2913 # Capitalization is needed because of Python bug 2275: http://bugs.python.org/issue2275
2914 # (urllib capitalizes the dict keys because of this bug)
2915 if h.capitalize() not in req.headers:
33ac271b 2916 req.add_header(h, v)
87f0e62d
YCH
2917
2918 req.headers = handle_youtubedl_headers(req.headers)
989b4b2b
PH
2919
2920 if sys.version_info < (2, 7) and '#' in req.get_full_url():
2921 # Python 2.6 is brain-dead when it comes to fragments
2922 req._Request__original = req._Request__original.partition('#')[0]
2923 req._Request__r_type = req._Request__r_type.partition('#')[0]
2924
59ae15a5
PH
2925 return req
2926
acebc9cd 2927 def http_response(self, req, resp):
59ae15a5
PH
2928 old_resp = resp
2929 # gzip
2930 if resp.headers.get('Content-encoding', '') == 'gzip':
aa3e9507
PH
2931 content = resp.read()
2932 gz = gzip.GzipFile(fileobj=io.BytesIO(content), mode='rb')
2933 try:
2934 uncompressed = io.BytesIO(gz.read())
2935 except IOError as original_ioerror:
2936 # There may be junk add the end of the file
2937 # See http://stackoverflow.com/q/4928560/35070 for details
2938 for i in range(1, 1024):
2939 try:
2940 gz = gzip.GzipFile(fileobj=io.BytesIO(content[:-i]), mode='rb')
2941 uncompressed = io.BytesIO(gz.read())
2942 except IOError:
2943 continue
2944 break
2945 else:
2946 raise original_ioerror
b407d853 2947 resp = compat_urllib_request.addinfourl(uncompressed, old_resp.headers, old_resp.url, old_resp.code)
59ae15a5 2948 resp.msg = old_resp.msg
c047270c 2949 del resp.headers['Content-encoding']
59ae15a5
PH
2950 # deflate
2951 if resp.headers.get('Content-encoding', '') == 'deflate':
2952 gz = io.BytesIO(self.deflate(resp.read()))
b407d853 2953 resp = compat_urllib_request.addinfourl(gz, old_resp.headers, old_resp.url, old_resp.code)
59ae15a5 2954 resp.msg = old_resp.msg
c047270c 2955 del resp.headers['Content-encoding']
ad729172 2956 # Percent-encode redirect URL of Location HTTP header to satisfy RFC 3986 (see
067aa17e 2957 # https://github.com/ytdl-org/youtube-dl/issues/6457).
5a4d9ddb
S
2958 if 300 <= resp.code < 400:
2959 location = resp.headers.get('Location')
2960 if location:
2961 # As per RFC 2616, the default charset is iso-8859-1, which is respected by python 3
2962 if sys.version_info >= (3, 0):
2963 location = location.encode('iso-8859-1').decode('utf-8')
0ea59007
YCH
2964 else:
2965 location = location.decode('utf-8')
5a4d9ddb
S
2966 location_escaped = escape_url(location)
2967 if location != location_escaped:
2968 del resp.headers['Location']
9a4aec8b
YCH
2969 if sys.version_info < (3, 0):
2970 location_escaped = location_escaped.encode('utf-8')
5a4d9ddb 2971 resp.headers['Location'] = location_escaped
59ae15a5 2972 return resp
0f8d03f8 2973
acebc9cd
PH
2974 https_request = http_request
2975 https_response = http_response
bf50b038 2976
5de90176 2977
71aff188
YCH
2978def make_socks_conn_class(base_class, socks_proxy):
2979 assert issubclass(base_class, (
2980 compat_http_client.HTTPConnection, compat_http_client.HTTPSConnection))
2981
2982 url_components = compat_urlparse.urlparse(socks_proxy)
2983 if url_components.scheme.lower() == 'socks5':
2984 socks_type = ProxyType.SOCKS5
2985 elif url_components.scheme.lower() in ('socks', 'socks4'):
2986 socks_type = ProxyType.SOCKS4
51fb4995
YCH
2987 elif url_components.scheme.lower() == 'socks4a':
2988 socks_type = ProxyType.SOCKS4A
71aff188 2989
cdd94c2e
YCH
2990 def unquote_if_non_empty(s):
2991 if not s:
2992 return s
2993 return compat_urllib_parse_unquote_plus(s)
2994
71aff188
YCH
2995 proxy_args = (
2996 socks_type,
2997 url_components.hostname, url_components.port or 1080,
2998 True, # Remote DNS
cdd94c2e
YCH
2999 unquote_if_non_empty(url_components.username),
3000 unquote_if_non_empty(url_components.password),
71aff188
YCH
3001 )
3002
3003 class SocksConnection(base_class):
3004 def connect(self):
3005 self.sock = sockssocket()
3006 self.sock.setproxy(*proxy_args)
3007 if type(self.timeout) in (int, float):
3008 self.sock.settimeout(self.timeout)
3009 self.sock.connect((self.host, self.port))
3010
3011 if isinstance(self, compat_http_client.HTTPSConnection):
3012 if hasattr(self, '_context'): # Python > 2.6
3013 self.sock = self._context.wrap_socket(
3014 self.sock, server_hostname=self.host)
3015 else:
3016 self.sock = ssl.wrap_socket(self.sock)
3017
3018 return SocksConnection
3019
3020
be4a824d
PH
3021class YoutubeDLHTTPSHandler(compat_urllib_request.HTTPSHandler):
3022 def __init__(self, params, https_conn_class=None, *args, **kwargs):
3023 compat_urllib_request.HTTPSHandler.__init__(self, *args, **kwargs)
3024 self._https_conn_class = https_conn_class or compat_http_client.HTTPSConnection
3025 self._params = params
3026
3027 def https_open(self, req):
4f264c02 3028 kwargs = {}
71aff188
YCH
3029 conn_class = self._https_conn_class
3030
4f264c02
JMF
3031 if hasattr(self, '_context'): # python > 2.6
3032 kwargs['context'] = self._context
3033 if hasattr(self, '_check_hostname'): # python 3.x
3034 kwargs['check_hostname'] = self._check_hostname
71aff188
YCH
3035
3036 socks_proxy = req.headers.get('Ytdl-socks-proxy')
3037 if socks_proxy:
3038 conn_class = make_socks_conn_class(conn_class, socks_proxy)
3039 del req.headers['Ytdl-socks-proxy']
3040
be4a824d 3041 return self.do_open(functools.partial(
71aff188 3042 _create_http_connection, self, conn_class, True),
4f264c02 3043 req, **kwargs)
be4a824d
PH
3044
3045
1bab3437 3046class YoutubeDLCookieJar(compat_cookiejar.MozillaCookieJar):
f1a8511f
S
3047 """
3048 See [1] for cookie file format.
3049
3050 1. https://curl.haxx.se/docs/http-cookies.html
3051 """
e7e62441 3052 _HTTPONLY_PREFIX = '#HttpOnly_'
c380cc28
S
3053 _ENTRY_LEN = 7
3054 _HEADER = '''# Netscape HTTP Cookie File
7a5c1cfe 3055# This file is generated by yt-dlp. Do not edit.
c380cc28
S
3056
3057'''
3058 _CookieFileEntry = collections.namedtuple(
3059 'CookieFileEntry',
3060 ('domain_name', 'include_subdomains', 'path', 'https_only', 'expires_at', 'name', 'value'))
e7e62441 3061
1bab3437 3062 def save(self, filename=None, ignore_discard=False, ignore_expires=False):
c380cc28
S
3063 """
3064 Save cookies to a file.
3065
3066 Most of the code is taken from CPython 3.8 and slightly adapted
3067 to support cookie files with UTF-8 in both python 2 and 3.
3068 """
3069 if filename is None:
3070 if self.filename is not None:
3071 filename = self.filename
3072 else:
3073 raise ValueError(compat_cookiejar.MISSING_FILENAME_TEXT)
3074
1bab3437
S
3075 # Store session cookies with `expires` set to 0 instead of an empty
3076 # string
3077 for cookie in self:
3078 if cookie.expires is None:
3079 cookie.expires = 0
c380cc28
S
3080
3081 with io.open(filename, 'w', encoding='utf-8') as f:
3082 f.write(self._HEADER)
3083 now = time.time()
3084 for cookie in self:
3085 if not ignore_discard and cookie.discard:
3086 continue
3087 if not ignore_expires and cookie.is_expired(now):
3088 continue
3089 if cookie.secure:
3090 secure = 'TRUE'
3091 else:
3092 secure = 'FALSE'
3093 if cookie.domain.startswith('.'):
3094 initial_dot = 'TRUE'
3095 else:
3096 initial_dot = 'FALSE'
3097 if cookie.expires is not None:
3098 expires = compat_str(cookie.expires)
3099 else:
3100 expires = ''
3101 if cookie.value is None:
3102 # cookies.txt regards 'Set-Cookie: foo' as a cookie
3103 # with no name, whereas http.cookiejar regards it as a
3104 # cookie with no value.
3105 name = ''
3106 value = cookie.name
3107 else:
3108 name = cookie.name
3109 value = cookie.value
3110 f.write(
3111 '\t'.join([cookie.domain, initial_dot, cookie.path,
3112 secure, expires, name, value]) + '\n')
1bab3437
S
3113
3114 def load(self, filename=None, ignore_discard=False, ignore_expires=False):
e7e62441 3115 """Load cookies from a file."""
3116 if filename is None:
3117 if self.filename is not None:
3118 filename = self.filename
3119 else:
3120 raise ValueError(compat_cookiejar.MISSING_FILENAME_TEXT)
3121
c380cc28
S
3122 def prepare_line(line):
3123 if line.startswith(self._HTTPONLY_PREFIX):
3124 line = line[len(self._HTTPONLY_PREFIX):]
3125 # comments and empty lines are fine
3126 if line.startswith('#') or not line.strip():
3127 return line
3128 cookie_list = line.split('\t')
3129 if len(cookie_list) != self._ENTRY_LEN:
3130 raise compat_cookiejar.LoadError('invalid length %d' % len(cookie_list))
3131 cookie = self._CookieFileEntry(*cookie_list)
3132 if cookie.expires_at and not cookie.expires_at.isdigit():
3133 raise compat_cookiejar.LoadError('invalid expires at %s' % cookie.expires_at)
3134 return line
3135
e7e62441 3136 cf = io.StringIO()
c380cc28 3137 with io.open(filename, encoding='utf-8') as f:
e7e62441 3138 for line in f:
c380cc28
S
3139 try:
3140 cf.write(prepare_line(line))
3141 except compat_cookiejar.LoadError as e:
3142 write_string(
3143 'WARNING: skipping cookie file entry due to %s: %r\n'
3144 % (e, line), sys.stderr)
3145 continue
e7e62441 3146 cf.seek(0)
3147 self._really_load(cf, filename, ignore_discard, ignore_expires)
1bab3437
S
3148 # Session cookies are denoted by either `expires` field set to
3149 # an empty string or 0. MozillaCookieJar only recognizes the former
3150 # (see [1]). So we need to force the latter to be recognized as session
3151 # cookies on our own.
3152 # Session cookies may be important for cookies-based authentication,
3153 # e.g. usually, when user does not check 'Remember me' check box while
3154 # logging in on a site, some important cookies are stored as session
3155 # cookies so that not recognizing them will result in failed login.
3156 # 1. https://bugs.python.org/issue17164
3157 for cookie in self:
3158 # Treat `expires=0` cookies as session cookies
3159 if cookie.expires == 0:
3160 cookie.expires = None
3161 cookie.discard = True
3162
3163
a6420bf5
S
3164class YoutubeDLCookieProcessor(compat_urllib_request.HTTPCookieProcessor):
3165 def __init__(self, cookiejar=None):
3166 compat_urllib_request.HTTPCookieProcessor.__init__(self, cookiejar)
3167
3168 def http_response(self, request, response):
3169 # Python 2 will choke on the next HTTP request if there are non-ASCII
3170 # characters in the Set-Cookie HTTP header of the last response (see
067aa17e 3171 # https://github.com/ytdl-org/youtube-dl/issues/6769).
a6420bf5
S
3172 # In order to at least prevent crashing we will percent encode Set-Cookie
3173 # header before HTTPCookieProcessor starts processing it.
e28034c5
S
3174 # if sys.version_info < (3, 0) and response.headers:
3175 # for set_cookie_header in ('Set-Cookie', 'Set-Cookie2'):
3176 # set_cookie = response.headers.get(set_cookie_header)
3177 # if set_cookie:
3178 # set_cookie_escaped = compat_urllib_parse.quote(set_cookie, b"%/;:@&=+$,!~*'()?#[] ")
3179 # if set_cookie != set_cookie_escaped:
3180 # del response.headers[set_cookie_header]
3181 # response.headers[set_cookie_header] = set_cookie_escaped
a6420bf5
S
3182 return compat_urllib_request.HTTPCookieProcessor.http_response(self, request, response)
3183
f5fa042c 3184 https_request = compat_urllib_request.HTTPCookieProcessor.http_request
a6420bf5
S
3185 https_response = http_response
3186
3187
fca6dba8 3188class YoutubeDLRedirectHandler(compat_urllib_request.HTTPRedirectHandler):
201c1459 3189 """YoutubeDL redirect handler
3190
3191 The code is based on HTTPRedirectHandler implementation from CPython [1].
3192
3193 This redirect handler solves two issues:
3194 - ensures redirect URL is always unicode under python 2
3195 - introduces support for experimental HTTP response status code
3196 308 Permanent Redirect [2] used by some sites [3]
3197
3198 1. https://github.com/python/cpython/blob/master/Lib/urllib/request.py
3199 2. https://developer.mozilla.org/en-US/docs/Web/HTTP/Status/308
3200 3. https://github.com/ytdl-org/youtube-dl/issues/28768
3201 """
3202
3203 http_error_301 = http_error_303 = http_error_307 = http_error_308 = compat_urllib_request.HTTPRedirectHandler.http_error_302
3204
3205 def redirect_request(self, req, fp, code, msg, headers, newurl):
3206 """Return a Request or None in response to a redirect.
3207
3208 This is called by the http_error_30x methods when a
3209 redirection response is received. If a redirection should
3210 take place, return a new Request to allow http_error_30x to
3211 perform the redirect. Otherwise, raise HTTPError if no-one
3212 else should try to handle this url. Return None if you can't
3213 but another Handler might.
3214 """
3215 m = req.get_method()
3216 if (not (code in (301, 302, 303, 307, 308) and m in ("GET", "HEAD")
3217 or code in (301, 302, 303) and m == "POST")):
3218 raise compat_HTTPError(req.full_url, code, msg, headers, fp)
3219 # Strictly (according to RFC 2616), 301 or 302 in response to
3220 # a POST MUST NOT cause a redirection without confirmation
3221 # from the user (of urllib.request, in this case). In practice,
3222 # essentially all clients do redirect in this case, so we do
3223 # the same.
3224
3225 # On python 2 urlh.geturl() may sometimes return redirect URL
3226 # as byte string instead of unicode. This workaround allows
3227 # to force it always return unicode.
3228 if sys.version_info[0] < 3:
3229 newurl = compat_str(newurl)
3230
3231 # Be conciliant with URIs containing a space. This is mainly
3232 # redundant with the more complete encoding done in http_error_302(),
3233 # but it is kept for compatibility with other callers.
3234 newurl = newurl.replace(' ', '%20')
3235
3236 CONTENT_HEADERS = ("content-length", "content-type")
3237 # NB: don't use dict comprehension for python 2.6 compatibility
3238 newheaders = dict((k, v) for k, v in req.headers.items()
3239 if k.lower() not in CONTENT_HEADERS)
3240 return compat_urllib_request.Request(
3241 newurl, headers=newheaders, origin_req_host=req.origin_req_host,
3242 unverifiable=True)
fca6dba8
S
3243
3244
46f59e89
S
3245def extract_timezone(date_str):
3246 m = re.search(
f137e4c2 3247 r'''(?x)
3248 ^.{8,}? # >=8 char non-TZ prefix, if present
3249 (?P<tz>Z| # just the UTC Z, or
3250 (?:(?<=.\b\d{4}|\b\d{2}:\d\d)| # preceded by 4 digits or hh:mm or
3251 (?<!.\b[a-zA-Z]{3}|[a-zA-Z]{4}|..\b\d\d)) # not preceded by 3 alpha word or >= 4 alpha or 2 digits
3252 [ ]? # optional space
3253 (?P<sign>\+|-) # +/-
3254 (?P<hours>[0-9]{2}):?(?P<minutes>[0-9]{2}) # hh[:]mm
3255 $)
3256 ''', date_str)
46f59e89
S
3257 if not m:
3258 timezone = datetime.timedelta()
3259 else:
3260 date_str = date_str[:-len(m.group('tz'))]
3261 if not m.group('sign'):
3262 timezone = datetime.timedelta()
3263 else:
3264 sign = 1 if m.group('sign') == '+' else -1
3265 timezone = datetime.timedelta(
3266 hours=sign * int(m.group('hours')),
3267 minutes=sign * int(m.group('minutes')))
3268 return timezone, date_str
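
# Editor's sketch (not part of the original module, never called by yt-dlp):
# extract_timezone() returns the UTC offset and the remaining date string.
def _extract_timezone_example():
    tz, rest = extract_timezone('2018-03-01T10:00:00+0100')
    assert tz == datetime.timedelta(hours=1)
    assert rest == '2018-03-01T10:00:00'
    # Without a recognizable suffix the offset is zero and the string is untouched
    assert extract_timezone('20180301') == (datetime.timedelta(0), '20180301')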
3269
3270
08b38d54 3271def parse_iso8601(date_str, delimiter='T', timezone=None):
912b38b4
PH
3272 """ Return a UNIX timestamp from the given date """
3273
3274 if date_str is None:
3275 return None
3276
52c3a6e4
S
3277 date_str = re.sub(r'\.[0-9]+', '', date_str)
3278
08b38d54 3279 if timezone is None:
46f59e89
S
3280 timezone, date_str = extract_timezone(date_str)
3281
52c3a6e4
S
3282 try:
3283 date_format = '%Y-%m-%d{0}%H:%M:%S'.format(delimiter)
3284 dt = datetime.datetime.strptime(date_str, date_format) - timezone
3285 return calendar.timegm(dt.timetuple())
3286 except ValueError:
3287 pass
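
# Editor's sketch (not part of the original module, never called by yt-dlp):
# equivalent ISO 8601 timestamps map to the same Unix time.
def _parse_iso8601_example():
    assert parse_iso8601('2018-03-01T10:00:00+0100') == 1519894800
    assert parse_iso8601('2018-03-01T09:00:00Z') == 1519894800
    assert parse_iso8601(None) is None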
912b38b4
PH
3288
3289
46f59e89
S
3290def date_formats(day_first=True):
3291 return DATE_FORMATS_DAY_FIRST if day_first else DATE_FORMATS_MONTH_FIRST
3292
3293
42bdd9d0 3294def unified_strdate(date_str, day_first=True):
bf50b038 3295 """Return a string with the date in the format YYYYMMDD"""
64e7ad60
PH
3296
3297 if date_str is None:
3298 return None
bf50b038 3299 upload_date = None
5f6a1245 3300 # Replace commas
026fcc04 3301 date_str = date_str.replace(',', ' ')
42bdd9d0 3302 # Remove AM/PM + timezone
9bb8e0a3 3303 date_str = re.sub(r'(?i)\s*(?:AM|PM)(?:\s+[A-Z]+)?', '', date_str)
46f59e89 3304 _, date_str = extract_timezone(date_str)
42bdd9d0 3305
46f59e89 3306 for expression in date_formats(day_first):
bf50b038
JMF
3307 try:
3308 upload_date = datetime.datetime.strptime(date_str, expression).strftime('%Y%m%d')
5de90176 3309 except ValueError:
bf50b038 3310 pass
42393ce2
PH
3311 if upload_date is None:
3312 timetuple = email.utils.parsedate_tz(date_str)
3313 if timetuple:
c6b9cf05
S
3314 try:
3315 upload_date = datetime.datetime(*timetuple[:6]).strftime('%Y%m%d')
3316 except ValueError:
3317 pass
6a750402
JMF
3318 if upload_date is not None:
3319 return compat_str(upload_date)
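
# Editor's sketch (not part of the original module, never called by yt-dlp):
# editor-chosen inputs; assumes the DATE_FORMATS* tables defined earlier in
# this module include the usual '%B %d %Y' and '%Y/%m/%d' patterns.
def _unified_strdate_example():
    assert unified_strdate('December 21, 2010') == '20101221'
    assert unified_strdate('2012/10/11') == '20121011'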
bf50b038 3320
5f6a1245 3321
46f59e89
S
3322def unified_timestamp(date_str, day_first=True):
3323 if date_str is None:
3324 return None
3325
2ae2ffda 3326 date_str = re.sub(r'[,|]', '', date_str)
46f59e89 3327
7dc2a74e 3328 pm_delta = 12 if re.search(r'(?i)PM', date_str) else 0
46f59e89
S
3329 timezone, date_str = extract_timezone(date_str)
3330
3331 # Remove AM/PM + timezone
3332 date_str = re.sub(r'(?i)\s*(?:AM|PM)(?:\s+[A-Z]+)?', '', date_str)
3333
deef3195
S
3334 # Remove unrecognized timezones from ISO 8601 alike timestamps
3335 m = re.search(r'\d{1,2}:\d{1,2}(?:\.\d+)?(?P<tz>\s*[A-Z]+)$', date_str)
3336 if m:
3337 date_str = date_str[:-len(m.group('tz'))]
3338
f226880c
PH
3339 # Python only supports microseconds, so remove nanoseconds
3340 m = re.search(r'^([0-9]{4,}-[0-9]{1,2}-[0-9]{1,2}T[0-9]{1,2}:[0-9]{1,2}:[0-9]{1,2}\.[0-9]{6})[0-9]+$', date_str)
3341 if m:
3342 date_str = m.group(1)
3343
46f59e89
S
3344 for expression in date_formats(day_first):
3345 try:
7dc2a74e 3346 dt = datetime.datetime.strptime(date_str, expression) - timezone + datetime.timedelta(hours=pm_delta)
46f59e89
S
3347 return calendar.timegm(dt.timetuple())
3348 except ValueError:
3349 pass
3350 timetuple = email.utils.parsedate_tz(date_str)
3351 if timetuple:
7dc2a74e 3352 return calendar.timegm(timetuple) + pm_delta * 3600
46f59e89
S
3353
3354
28e614de 3355def determine_ext(url, default_ext='unknown_video'):
85750f89 3356 if url is None or '.' not in url:
f4776371 3357 return default_ext
9cb9a5df 3358 guess = url.partition('?')[0].rpartition('.')[2]
73e79f2a
PH
3359 if re.match(r'^[A-Za-z0-9]+$', guess):
3360 return guess
a7aaa398
S
3361 # Try extract ext from URLs like http://example.com/foo/bar.mp4/?download
3362 elif guess.rstrip('/') in KNOWN_EXTENSIONS:
9cb9a5df 3363 return guess.rstrip('/')
73e79f2a 3364 else:
cbdbb766 3365 return default_ext
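
# Editor's sketch (not part of the original module, never called by yt-dlp):
# query strings are ignored and unrecognized extensions fall back to default_ext.
def _determine_ext_example():
    assert determine_ext('http://example.com/video.mp4?download=1') == 'mp4'
    assert determine_ext('http://example.com/video', default_ext='mp4') == 'mp4'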
73e79f2a 3366
5f6a1245 3367
824fa511
S
3368def subtitles_filename(filename, sub_lang, sub_format, expected_real_ext=None):
3369 return replace_extension(filename, sub_lang + '.' + sub_format, expected_real_ext)
d4051a8e 3370
5f6a1245 3371
9e62f283 3372def datetime_from_str(date_str, precision='auto', format='%Y%m%d'):
37254abc
JMF
3373 """
3374 Return a datetime object from a string in the format YYYYMMDD or
9e62f283 3375 (now|today|date)[+-][0-9](microsecond|second|minute|hour|day|week|month|year)(s)?
3376
3377 format: string date format used to return datetime object from
3378 precision: round the time portion of a datetime object.
3379 auto|microsecond|second|minute|hour|day.
3380 auto: round to the unit provided in date_str (if applicable).
3381 """
3382 auto_precision = False
3383 if precision == 'auto':
3384 auto_precision = True
3385 precision = 'microsecond'
3386 today = datetime_round(datetime.datetime.now(), precision)
f8795e10 3387 if date_str in ('now', 'today'):
37254abc 3388 return today
f8795e10
PH
3389 if date_str == 'yesterday':
3390 return today - datetime.timedelta(days=1)
9e62f283 3391 match = re.match(
3392 r'(?P<start>.+)(?P<sign>[+-])(?P<time>\d+)(?P<unit>microsecond|second|minute|hour|day|week|month|year)(s)?',
3393 date_str)
37254abc 3394 if match is not None:
9e62f283 3395 start_time = datetime_from_str(match.group('start'), precision, format)
3396 time = int(match.group('time')) * (-1 if match.group('sign') == '-' else 1)
37254abc 3397 unit = match.group('unit')
9e62f283 3398 if unit == 'month' or unit == 'year':
3399 new_date = datetime_add_months(start_time, time * 12 if unit == 'year' else time)
37254abc 3400 unit = 'day'
9e62f283 3401 else:
3402 if unit == 'week':
3403 unit = 'day'
3404 time *= 7
3405 delta = datetime.timedelta(**{unit + 's': time})
3406 new_date = start_time + delta
3407 if auto_precision:
3408 return datetime_round(new_date, unit)
3409 return new_date
3410
3411 return datetime_round(datetime.datetime.strptime(date_str, format), precision)
3412
3413
3414def date_from_str(date_str, format='%Y%m%d'):
3415 """
3416 Return a datetime object from a string in the format YYYYMMDD or
3417 (now|today|date)[+-][0-9](microsecond|second|minute|hour|day|week|month|year)(s)?
3418
3419 format: string date format used to return datetime object from
3420 """
3421 return datetime_from_str(date_str, precision='microsecond', format=format).date()
3422
3423
3424def datetime_add_months(dt, months):
3425 """Increment/Decrement a datetime object by months."""
3426 month = dt.month + months - 1
3427 year = dt.year + month // 12
3428 month = month % 12 + 1
3429 day = min(dt.day, calendar.monthrange(year, month)[1])
3430 return dt.replace(year, month, day)
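
# Editor's sketch (not part of the original module, never called by yt-dlp):
# the day of month is clamped when the target month is shorter.
def _datetime_add_months_example():
    assert datetime_add_months(datetime.datetime(2020, 1, 31), 1) == datetime.datetime(2020, 2, 29)
    assert datetime_add_months(datetime.datetime(2020, 12, 15), 1) == datetime.datetime(2021, 1, 15)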
3431
3432
3433def datetime_round(dt, precision='day'):
3434 """
3435 Round a datetime object's time to a specific precision
3436 """
3437 if precision == 'microsecond':
3438 return dt
3439
3440 unit_seconds = {
3441 'day': 86400,
3442 'hour': 3600,
3443 'minute': 60,
3444 'second': 1,
3445 }
3446 roundto = lambda x, n: ((x + n / 2) // n) * n
3447 timestamp = calendar.timegm(dt.timetuple())
3448 return datetime.datetime.utcfromtimestamp(roundto(timestamp, unit_seconds[precision]))
5f6a1245
JW
3449
3450
e63fc1be 3451def hyphenate_date(date_str):
3452 """
3453 Convert a date in 'YYYYMMDD' format to 'YYYY-MM-DD' format"""
3454 match = re.match(r'^(\d\d\d\d)(\d\d)(\d\d)$', date_str)
3455 if match is not None:
3456 return '-'.join(match.groups())
3457 else:
3458 return date_str
3459
5f6a1245 3460
bd558525
JMF
3461class DateRange(object):
3462 """Represents a time interval between two dates"""
5f6a1245 3463
bd558525
JMF
3464 def __init__(self, start=None, end=None):
3465 """start and end must be strings in the format accepted by date"""
3466 if start is not None:
3467 self.start = date_from_str(start)
3468 else:
3469 self.start = datetime.datetime.min.date()
3470 if end is not None:
3471 self.end = date_from_str(end)
3472 else:
3473 self.end = datetime.datetime.max.date()
37254abc 3474 if self.start > self.end:
bd558525 3475 raise ValueError('Date range: "%s" , the start date must be before the end date' % self)
5f6a1245 3476
bd558525
JMF
3477 @classmethod
3478 def day(cls, day):
3479 """Returns a range that only contains the given day"""
5f6a1245
JW
3480 return cls(day, day)
3481
bd558525
JMF
3482 def __contains__(self, date):
3483 """Check if the date is in the range"""
37254abc
JMF
3484 if not isinstance(date, datetime.date):
3485 date = date_from_str(date)
3486 return self.start <= date <= self.end
5f6a1245 3487
bd558525 3488 def __str__(self):
5f6a1245 3489 return '%s - %s' % (self.start.isoformat(), self.end.isoformat())
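
# Editor's sketch (not part of the original module, never called by yt-dlp):
# membership checks on DateRange with editor-chosen dates.
def _daterange_example():
    week = DateRange('20200101', '20200107')
    assert '20200103' in week
    assert '20200110' not in week
    assert '20200515' in DateRange.day('20200515')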
c496ca96
PH
3490
3491
3492def platform_name():
3493 """ Returns the platform name as a compat_str """
3494 res = platform.platform()
3495 if isinstance(res, bytes):
3496 res = res.decode(preferredencoding())
3497
3498 assert isinstance(res, compat_str)
3499 return res
c257baff
PH
3500
3501
49fa4d9a
N
3502def get_windows_version():
3503 ''' Get Windows version. None if it's not running on Windows '''
3504 if compat_os_name == 'nt':
3505 return version_tuple(platform.win32_ver()[1])
3506 else:
3507 return None
3508
3509
b58ddb32
PH
3510def _windows_write_string(s, out):
3511 """ Returns True if the string was written using special methods,
3512 False if it has yet to be written out."""
3513 # Adapted from http://stackoverflow.com/a/3259271/35070
3514
b58ddb32
PH
3515 import ctypes.wintypes
3516
3517 WIN_OUTPUT_IDS = {
3518 1: -11,
3519 2: -12,
3520 }
3521
a383a98a
PH
3522 try:
3523 fileno = out.fileno()
3524 except AttributeError:
3525 # If the output stream doesn't have a fileno, it's virtual
3526 return False
aa42e873
PH
3527 except io.UnsupportedOperation:
3528 # Some strange Windows pseudo files?
3529 return False
b58ddb32
PH
3530 if fileno not in WIN_OUTPUT_IDS:
3531 return False
3532
d7cd9a9e 3533 GetStdHandle = compat_ctypes_WINFUNCTYPE(
b58ddb32 3534 ctypes.wintypes.HANDLE, ctypes.wintypes.DWORD)(
d7cd9a9e 3535 ('GetStdHandle', ctypes.windll.kernel32))
b58ddb32
PH
3536 h = GetStdHandle(WIN_OUTPUT_IDS[fileno])
3537
d7cd9a9e 3538 WriteConsoleW = compat_ctypes_WINFUNCTYPE(
b58ddb32
PH
3539 ctypes.wintypes.BOOL, ctypes.wintypes.HANDLE, ctypes.wintypes.LPWSTR,
3540 ctypes.wintypes.DWORD, ctypes.POINTER(ctypes.wintypes.DWORD),
d7cd9a9e 3541 ctypes.wintypes.LPVOID)(('WriteConsoleW', ctypes.windll.kernel32))
b58ddb32
PH
3542 written = ctypes.wintypes.DWORD(0)
3543
d7cd9a9e 3544 GetFileType = compat_ctypes_WINFUNCTYPE(ctypes.wintypes.DWORD, ctypes.wintypes.DWORD)(('GetFileType', ctypes.windll.kernel32))
b58ddb32
PH
3545 FILE_TYPE_CHAR = 0x0002
3546 FILE_TYPE_REMOTE = 0x8000
d7cd9a9e 3547 GetConsoleMode = compat_ctypes_WINFUNCTYPE(
b58ddb32
PH
3548 ctypes.wintypes.BOOL, ctypes.wintypes.HANDLE,
3549 ctypes.POINTER(ctypes.wintypes.DWORD))(
d7cd9a9e 3550 ('GetConsoleMode', ctypes.windll.kernel32))
b58ddb32
PH
3551 INVALID_HANDLE_VALUE = ctypes.wintypes.DWORD(-1).value
3552
3553 def not_a_console(handle):
3554 if handle == INVALID_HANDLE_VALUE or handle is None:
3555 return True
3089bc74
S
3556 return ((GetFileType(handle) & ~FILE_TYPE_REMOTE) != FILE_TYPE_CHAR
3557 or GetConsoleMode(handle, ctypes.byref(ctypes.wintypes.DWORD())) == 0)
b58ddb32
PH
3558
3559 if not_a_console(h):
3560 return False
3561
d1b9c912
PH
3562 def next_nonbmp_pos(s):
3563 try:
3564 return next(i for i, c in enumerate(s) if ord(c) > 0xffff)
3565 except StopIteration:
3566 return len(s)
3567
3568 while s:
3569 count = min(next_nonbmp_pos(s), 1024)
3570
b58ddb32 3571 ret = WriteConsoleW(
d1b9c912 3572 h, s, count if count else 2, ctypes.byref(written), None)
b58ddb32
PH
3573 if ret == 0:
3574 raise OSError('Failed to write string')
d1b9c912
PH
3575 if not count: # We just wrote a non-BMP character
3576 assert written.value == 2
3577 s = s[1:]
3578 else:
3579 assert written.value > 0
3580 s = s[written.value:]
b58ddb32
PH
3581 return True
3582
3583
734f90bb 3584def write_string(s, out=None, encoding=None):
7459e3a2
PH
3585 if out is None:
3586 out = sys.stderr
8bf48f23 3587 assert type(s) == compat_str
7459e3a2 3588
b58ddb32
PH
3589 if sys.platform == 'win32' and encoding is None and hasattr(out, 'fileno'):
3590 if _windows_write_string(s, out):
3591 return
3592
3089bc74
S
3593 if ('b' in getattr(out, 'mode', '')
3594 or sys.version_info[0] < 3): # Python 2 lies about mode of sys.stderr
104aa738
PH
3595 byt = s.encode(encoding or preferredencoding(), 'ignore')
3596 out.write(byt)
3597 elif hasattr(out, 'buffer'):
3598 enc = encoding or getattr(out, 'encoding', None) or preferredencoding()
3599 byt = s.encode(enc, 'ignore')
3600 out.buffer.write(byt)
3601 else:
8bf48f23 3602 out.write(s)
7459e3a2
PH
3603 out.flush()
3604
3605
48ea9cea
PH
3606def bytes_to_intlist(bs):
3607 if not bs:
3608 return []
3609 if isinstance(bs[0], int): # Python 3
3610 return list(bs)
3611 else:
3612 return [ord(c) for c in bs]
3613
c257baff 3614
cba892fa 3615def intlist_to_bytes(xs):
3616 if not xs:
3617 return b''
edaa23f8 3618 return compat_struct_pack('%dB' % len(xs), *xs)
c38b1e77
PH
3619
3620
c1c9a79c
PH
3621# Cross-platform file locking
3622if sys.platform == 'win32':
3623 import ctypes.wintypes
3624 import msvcrt
3625
3626 class OVERLAPPED(ctypes.Structure):
3627 _fields_ = [
3628 ('Internal', ctypes.wintypes.LPVOID),
3629 ('InternalHigh', ctypes.wintypes.LPVOID),
3630 ('Offset', ctypes.wintypes.DWORD),
3631 ('OffsetHigh', ctypes.wintypes.DWORD),
3632 ('hEvent', ctypes.wintypes.HANDLE),
3633 ]
3634
3635 kernel32 = ctypes.windll.kernel32
3636 LockFileEx = kernel32.LockFileEx
3637 LockFileEx.argtypes = [
3638 ctypes.wintypes.HANDLE, # hFile
3639 ctypes.wintypes.DWORD, # dwFlags
3640 ctypes.wintypes.DWORD, # dwReserved
3641 ctypes.wintypes.DWORD, # nNumberOfBytesToLockLow
3642 ctypes.wintypes.DWORD, # nNumberOfBytesToLockHigh
3643 ctypes.POINTER(OVERLAPPED) # Overlapped
3644 ]
3645 LockFileEx.restype = ctypes.wintypes.BOOL
3646 UnlockFileEx = kernel32.UnlockFileEx
3647 UnlockFileEx.argtypes = [
3648 ctypes.wintypes.HANDLE, # hFile
3649 ctypes.wintypes.DWORD, # dwReserved
3650 ctypes.wintypes.DWORD, # nNumberOfBytesToLockLow
3651 ctypes.wintypes.DWORD, # nNumberOfBytesToLockHigh
3652 ctypes.POINTER(OVERLAPPED) # Overlapped
3653 ]
3654 UnlockFileEx.restype = ctypes.wintypes.BOOL
3655 whole_low = 0xffffffff
3656 whole_high = 0x7fffffff
3657
3658 def _lock_file(f, exclusive):
3659 overlapped = OVERLAPPED()
3660 overlapped.Offset = 0
3661 overlapped.OffsetHigh = 0
3662 overlapped.hEvent = 0
3663 f._lock_file_overlapped_p = ctypes.pointer(overlapped)
3664 handle = msvcrt.get_osfhandle(f.fileno())
3665 if not LockFileEx(handle, 0x2 if exclusive else 0x0, 0,
3666 whole_low, whole_high, f._lock_file_overlapped_p):
3667 raise OSError('Locking file failed: %r' % ctypes.FormatError())
3668
3669 def _unlock_file(f):
3670 assert f._lock_file_overlapped_p
3671 handle = msvcrt.get_osfhandle(f.fileno())
3672 if not UnlockFileEx(handle, 0,
3673 whole_low, whole_high, f._lock_file_overlapped_p):
3674 raise OSError('Unlocking file failed: %r' % ctypes.FormatError())
3675
3676else:
399a76e6
YCH
3677 # Some platforms, such as Jython, are missing fcntl
3678 try:
3679 import fcntl
c1c9a79c 3680
399a76e6
YCH
3681 def _lock_file(f, exclusive):
3682 fcntl.flock(f, fcntl.LOCK_EX if exclusive else fcntl.LOCK_SH)
c1c9a79c 3683
399a76e6
YCH
3684 def _unlock_file(f):
3685 fcntl.flock(f, fcntl.LOCK_UN)
3686 except ImportError:
3687 UNSUPPORTED_MSG = 'file locking is not supported on this platform'
3688
3689 def _lock_file(f, exclusive):
3690 raise IOError(UNSUPPORTED_MSG)
3691
3692 def _unlock_file(f):
3693 raise IOError(UNSUPPORTED_MSG)
c1c9a79c
PH
3694
3695
3696class locked_file(object):
3697 def __init__(self, filename, mode, encoding=None):
3698 assert mode in ['r', 'a', 'w']
3699 self.f = io.open(filename, mode, encoding=encoding)
3700 self.mode = mode
3701
3702 def __enter__(self):
3703 exclusive = self.mode != 'r'
3704 try:
3705 _lock_file(self.f, exclusive)
3706 except IOError:
3707 self.f.close()
3708 raise
3709 return self
3710
3711 def __exit__(self, etype, value, traceback):
3712 try:
3713 _unlock_file(self.f)
3714 finally:
3715 self.f.close()
3716
3717 def __iter__(self):
3718 return iter(self.f)
3719
3720 def write(self, *args):
3721 return self.f.write(*args)
3722
3723 def read(self, *args):
3724 return self.f.read(*args)
4eb7f1d1
JMF
3725
3726
4644ac55
S
3727def get_filesystem_encoding():
3728 encoding = sys.getfilesystemencoding()
3729 return encoding if encoding is not None else 'utf-8'
3730
3731
4eb7f1d1 3732def shell_quote(args):
a6a173c2 3733 quoted_args = []
4644ac55 3734 encoding = get_filesystem_encoding()
a6a173c2
JMF
3735 for a in args:
3736 if isinstance(a, bytes):
3737 # We may get a filename encoded with 'encodeFilename'
3738 a = a.decode(encoding)
aefce8e6 3739 quoted_args.append(compat_shlex_quote(a))
28e614de 3740 return ' '.join(quoted_args)
9d4660ca
PH
3741
3742
3743def smuggle_url(url, data):
3744 """ Pass additional data in a URL for internal use. """
3745
81953d1a
RA
3746 url, idata = unsmuggle_url(url, {})
3747 data.update(idata)
15707c7e 3748 sdata = compat_urllib_parse_urlencode(
28e614de
PH
3749 {'__youtubedl_smuggle': json.dumps(data)})
3750 return url + '#' + sdata
9d4660ca
PH
3751
3752
79f82953 3753def unsmuggle_url(smug_url, default=None):
83e865a3 3754 if '#__youtubedl_smuggle' not in smug_url:
79f82953 3755 return smug_url, default
28e614de
PH
3756 url, _, sdata = smug_url.rpartition('#')
3757 jsond = compat_parse_qs(sdata)['__youtubedl_smuggle'][0]
9d4660ca
PH
3758 data = json.loads(jsond)
3759 return url, data
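
# Editor's sketch (not part of the original module, never called by yt-dlp):
# smuggle_url()/unsmuggle_url() round-trip arbitrary JSON-serializable data.
def _smuggle_url_example():
    smug = smuggle_url('http://example.com/video', {'referer': 'http://example.com/'})
    assert unsmuggle_url(smug) == ('http://example.com/video', {'referer': 'http://example.com/'})
    # URLs without smuggled data come back with the supplied default
    assert unsmuggle_url('http://example.com/video', {}) == ('http://example.com/video', {})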
02dbf93f
PH
3760
3761
e0fd9573 3762def format_decimal_suffix(num, fmt='%d%s', *, factor=1000):
3763 """ Formats numbers with decimal sufixes like K, M, etc """
3764 num, factor = float_or_none(num), float(factor)
3765 if num is None:
3766 return None
3767 exponent = 0 if num == 0 else int(math.log(num, factor))
abbeeebc 3768 suffix = ['', *'kMGTPEZY'][exponent]
3769 if factor == 1024:
3770 suffix = {'k': 'Ki', '': ''}.get(suffix, f'{suffix}i')
e0fd9573 3771 converted = num / (factor ** exponent)
abbeeebc 3772 return fmt % (converted, suffix)
e0fd9573 3773
3774
02dbf93f 3775def format_bytes(bytes):
f02d24d8 3776 return format_decimal_suffix(bytes, '%.2f%sB', factor=1024) or 'N/A'
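
# Editor's sketch (not part of the original module, never called by yt-dlp):
# decimal (factor 1000) vs binary (factor 1024) suffixes; relies on
# float_or_none() defined elsewhere in this module.
def _format_bytes_example():
    assert format_decimal_suffix(1500, '%.1f%s') == '1.5k'
    assert format_bytes(2048) == '2.00KiB'
    assert format_bytes(None) == 'N/A'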
f53c966a 3777
1c088fa8 3778
fb47597b
S
3779def lookup_unit_table(unit_table, s):
3780 units_re = '|'.join(re.escape(u) for u in unit_table)
3781 m = re.match(
782b1b5b 3782 r'(?P<num>[0-9]+(?:[,.][0-9]*)?)\s*(?P<unit>%s)\b' % units_re, s)
fb47597b
S
3783 if not m:
3784 return None
3785 num_str = m.group('num').replace(',', '.')
3786 mult = unit_table[m.group('unit')]
3787 return int(float(num_str) * mult)
3788
3789
be64b5b0
PH
3790def parse_filesize(s):
3791 if s is None:
3792 return None
3793
dfb1b146 3794 # The lower-case forms are of course incorrect and unofficial,
be64b5b0
PH
3795 # but we support those too
3796 _UNIT_TABLE = {
3797 'B': 1,
3798 'b': 1,
70852b47 3799 'bytes': 1,
be64b5b0
PH
3800 'KiB': 1024,
3801 'KB': 1000,
3802 'kB': 1024,
3803 'Kb': 1000,
13585d76 3804 'kb': 1000,
70852b47
YCH
3805 'kilobytes': 1000,
3806 'kibibytes': 1024,
be64b5b0
PH
3807 'MiB': 1024 ** 2,
3808 'MB': 1000 ** 2,
3809 'mB': 1024 ** 2,
3810 'Mb': 1000 ** 2,
13585d76 3811 'mb': 1000 ** 2,
70852b47
YCH
3812 'megabytes': 1000 ** 2,
3813 'mebibytes': 1024 ** 2,
be64b5b0
PH
3814 'GiB': 1024 ** 3,
3815 'GB': 1000 ** 3,
3816 'gB': 1024 ** 3,
3817 'Gb': 1000 ** 3,
13585d76 3818 'gb': 1000 ** 3,
70852b47
YCH
3819 'gigabytes': 1000 ** 3,
3820 'gibibytes': 1024 ** 3,
be64b5b0
PH
3821 'TiB': 1024 ** 4,
3822 'TB': 1000 ** 4,
3823 'tB': 1024 ** 4,
3824 'Tb': 1000 ** 4,
13585d76 3825 'tb': 1000 ** 4,
70852b47
YCH
3826 'terabytes': 1000 ** 4,
3827 'tebibytes': 1024 ** 4,
be64b5b0
PH
3828 'PiB': 1024 ** 5,
3829 'PB': 1000 ** 5,
3830 'pB': 1024 ** 5,
3831 'Pb': 1000 ** 5,
13585d76 3832 'pb': 1000 ** 5,
70852b47
YCH
3833 'petabytes': 1000 ** 5,
3834 'pebibytes': 1024 ** 5,
be64b5b0
PH
3835 'EiB': 1024 ** 6,
3836 'EB': 1000 ** 6,
3837 'eB': 1024 ** 6,
3838 'Eb': 1000 ** 6,
13585d76 3839 'eb': 1000 ** 6,
70852b47
YCH
3840 'exabytes': 1000 ** 6,
3841 'exbibytes': 1024 ** 6,
be64b5b0
PH
3842 'ZiB': 1024 ** 7,
3843 'ZB': 1000 ** 7,
3844 'zB': 1024 ** 7,
3845 'Zb': 1000 ** 7,
13585d76 3846 'zb': 1000 ** 7,
70852b47
YCH
3847 'zettabytes': 1000 ** 7,
3848 'zebibytes': 1024 ** 7,
be64b5b0
PH
3849 'YiB': 1024 ** 8,
3850 'YB': 1000 ** 8,
3851 'yB': 1024 ** 8,
3852 'Yb': 1000 ** 8,
13585d76 3853 'yb': 1000 ** 8,
70852b47
YCH
3854 'yottabytes': 1000 ** 8,
3855 'yobibytes': 1024 ** 8,
be64b5b0
PH
3856 }
3857
fb47597b
S
3858 return lookup_unit_table(_UNIT_TABLE, s)
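# Illustrative usage (doctest-style sketch): binary and decimal units resolve
# through the table above.
#     >>> parse_filesize('1.5 MiB')
#     1572864
#     >>> parse_filesize('10 MB')
#     10000000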
3859
3860
3861def parse_count(s):
3862 if s is None:
be64b5b0
PH
3863 return None
3864
352d5da8 3865 s = re.sub(r'^[^\d]+\s', '', s).strip()
fb47597b
S
3866
3867 if re.match(r'^[\d,.]+$', s):
3868 return str_to_int(s)
3869
3870 _UNIT_TABLE = {
3871 'k': 1000,
3872 'K': 1000,
3873 'm': 1000 ** 2,
3874 'M': 1000 ** 2,
3875 'kk': 1000 ** 2,
3876 'KK': 1000 ** 2,
352d5da8 3877 'b': 1000 ** 3,
3878 'B': 1000 ** 3,
fb47597b 3879 }
be64b5b0 3880
352d5da8 3881 ret = lookup_unit_table(_UNIT_TABLE, s)
3882 if ret is not None:
3883 return ret
3884
3885 mobj = re.match(r'([\d,.]+)(?:$|\s)', s)
3886 if mobj:
3887 return str_to_int(mobj.group(1))
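# Illustrative usage (doctest-style sketch):
#     >>> parse_count('1.5M')
#     1500000
#     >>> parse_count('1,234')
#     1234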
be64b5b0 3888
2f7ae819 3889
b871d7e9
S
3890def parse_resolution(s):
3891 if s is None:
3892 return {}
3893
17ec8bcf 3894 mobj = re.search(r'(?<![a-zA-Z0-9])(?P<w>\d+)\s*[xX×,]\s*(?P<h>\d+)(?![a-zA-Z0-9])', s)
b871d7e9
S
3895 if mobj:
3896 return {
3897 'width': int(mobj.group('w')),
3898 'height': int(mobj.group('h')),
3899 }
3900
17ec8bcf 3901 mobj = re.search(r'(?<![a-zA-Z0-9])(\d+)[pPiI](?![a-zA-Z0-9])', s)
b871d7e9
S
3902 if mobj:
3903 return {'height': int(mobj.group(1))}
3904
3905 mobj = re.search(r'\b([48])[kK]\b', s)
3906 if mobj:
3907 return {'height': int(mobj.group(1)) * 540}
3908
3909 return {}
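# Illustrative usage (doctest-style sketch):
#     >>> parse_resolution('1920x1080')
#     {'width': 1920, 'height': 1080}
#     >>> parse_resolution('720p')
#     {'height': 720}
#     >>> parse_resolution('4K')
#     {'height': 2160}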
3910
3911
0dc41787
S
3912def parse_bitrate(s):
3913 if not isinstance(s, compat_str):
3914 return
3915 mobj = re.search(r'\b(\d+)\s*kbps', s)
3916 if mobj:
3917 return int(mobj.group(1))
3918
3919
a942d6cb 3920def month_by_name(name, lang='en'):
caefb1de
PH
3921 """ Return the number of a month by (locale-independently) English name """
3922
f6717dec 3923 month_names = MONTH_NAMES.get(lang, MONTH_NAMES['en'])
a942d6cb 3924
caefb1de 3925 try:
f6717dec 3926 return month_names.index(name) + 1
7105440c
YCH
3927 except ValueError:
3928 return None
3929
3930
3931def month_by_abbreviation(abbrev):
3932 """ Return the number of a month by (locale-independently) English
3933 abbreviations """
3934
3935 try:
3936 return [s[:3] for s in ENGLISH_MONTH_NAMES].index(abbrev) + 1
caefb1de
PH
3937 except ValueError:
3938 return None
18258362
JMF
3939
3940
5aafe895 3941def fix_xml_ampersands(xml_str):
18258362 3942 """Replace all the '&' by '&amp;' in XML"""
5aafe895
PH
3943 return re.sub(
3944 r'&(?!amp;|lt;|gt;|apos;|quot;|#x[0-9a-fA-F]{,4};|#[0-9]{,4};)',
28e614de 3945 '&amp;',
5aafe895 3946 xml_str)
e3946f98
PH
3947
3948
3949def setproctitle(title):
8bf48f23 3950 assert isinstance(title, compat_str)
c1c05c67
YCH
3951
3952 # ctypes in Jython is not complete
3953 # http://bugs.jython.org/issue2148
3954 if sys.platform.startswith('java'):
3955 return
3956
e3946f98 3957 try:
611c1dd9 3958 libc = ctypes.cdll.LoadLibrary('libc.so.6')
e3946f98
PH
3959 except OSError:
3960 return
2f49bcd6
RC
3961 except TypeError:
3962 # LoadLibrary in Windows Python 2.7.13 only expects
3963 # a bytestring, but since unicode_literals turns
3964 # every string into a unicode string, it fails.
3965 return
6eefe533
PH
3966 title_bytes = title.encode('utf-8')
3967 buf = ctypes.create_string_buffer(len(title_bytes))
3968 buf.value = title_bytes
e3946f98 3969 try:
6eefe533 3970 libc.prctl(15, buf, 0, 0, 0)
e3946f98
PH
3971 except AttributeError:
3972 return # Strange libc, just skip this
d7dda168
PH
3973
3974
3975def remove_start(s, start):
46bc9b7d 3976 return s[len(start):] if s is not None and s.startswith(start) else s
29eb5174
PH
3977
3978
2b9faf55 3979def remove_end(s, end):
46bc9b7d 3980 return s[:-len(end)] if s is not None and s.endswith(end) else s
2b9faf55
PH
3981
3982
31b2051e
S
3983def remove_quotes(s):
3984 if s is None or len(s) < 2:
3985 return s
3986 for quote in ('"', "'", ):
3987 if s[0] == quote and s[-1] == quote:
3988 return s[1:-1]
3989 return s
3990
3991
b6e0c7d2
U
3992def get_domain(url):
3993 domain = re.match(r'(?:https?:\/\/)?(?:www\.)?(?P<domain>[^\n\/]+\.[^\n\/]+)(?:\/(.*))?', url)
3994 return domain.group('domain') if domain else None
3995
3996
29eb5174 3997def url_basename(url):
9b8aaeed 3998 path = compat_urlparse.urlparse(url).path
28e614de 3999 return path.strip('/').split('/')[-1]
aa94a6d3
PH
4000
4001
02dc0a36
S
4002def base_url(url):
4003 return re.match(r'https?://[^?#&]+/', url).group()
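# Illustrative usage of the three URL helpers above (doctest-style sketch;
# example.com is a placeholder):
#     >>> get_domain('https://www.example.com/watch?v=xyz')
#     'example.com'
#     >>> url_basename('https://example.com/dir/file.mp4?a=1')
#     'file.mp4'
#     >>> base_url('https://example.com/dir/file.mp4?a=1')
#     'https://example.com/dir/'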
4004
4005
e34c3361 4006def urljoin(base, path):
4b5de77b
S
4007 if isinstance(path, bytes):
4008 path = path.decode('utf-8')
e34c3361
S
4009 if not isinstance(path, compat_str) or not path:
4010 return None
fad4ceb5 4011 if re.match(r'^(?:[a-zA-Z][a-zA-Z0-9+-.]*:)?//', path):
e34c3361 4012 return path
4b5de77b
S
4013 if isinstance(base, bytes):
4014 base = base.decode('utf-8')
4015 if not isinstance(base, compat_str) or not re.match(
4016 r'^(?:https?:)?//', base):
e34c3361
S
4017 return None
4018 return compat_urlparse.urljoin(base, path)
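# Illustrative usage (doctest-style sketch; scheme-relative paths are returned as-is):
#     >>> urljoin('https://example.com/a/', 'b/c')
#     'https://example.com/a/b/c'
#     >>> urljoin('https://example.com/a/', '//cdn.example.com/x')
#     '//cdn.example.com/x'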
4019
4020
aa94a6d3
PH
4021class HEADRequest(compat_urllib_request.Request):
4022 def get_method(self):
611c1dd9 4023 return 'HEAD'
7217e148
PH
4024
4025
95cf60e8
S
4026class PUTRequest(compat_urllib_request.Request):
4027 def get_method(self):
4028 return 'PUT'
4029
4030
9732d77e 4031def int_or_none(v, scale=1, default=None, get_attr=None, invscale=1):
9e907ebd 4032 if get_attr and v is not None:
4033 v = getattr(v, get_attr, None)
1812afb7
S
4034 try:
4035 return int(v) * invscale // scale
31c49255 4036 except (ValueError, TypeError, OverflowError):
af98f8ff 4037 return default
9732d77e 4038
9572013d 4039
40a90862
JMF
4040def str_or_none(v, default=None):
4041 return default if v is None else compat_str(v)
4042
9732d77e
PH
4043
4044def str_to_int(int_str):
48d4681e 4045 """ A more relaxed version of int_or_none """
42db58ec 4046 if isinstance(int_str, compat_integer_types):
348c6bf1 4047 return int_str
42db58ec
S
4048 elif isinstance(int_str, compat_str):
4049 int_str = re.sub(r'[,\.\+]', '', int_str)
4050 return int_or_none(int_str)
608d11f5
PH
4051
4052
9732d77e 4053def float_or_none(v, scale=1, invscale=1, default=None):
caf80631
S
4054 if v is None:
4055 return default
4056 try:
4057 return float(v) * invscale / scale
5e1271c5 4058 except (ValueError, TypeError):
caf80631 4059 return default
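# Illustrative usage of the numeric coercion helpers above (doctest-style sketch):
#     >>> int_or_none('42')
#     42
#     >>> int_or_none(None) is None
#     True
#     >>> str_to_int('1,234')
#     1234
#     >>> float_or_none('2.5', invscale=60)
#     150.0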
43f775e4
PH
4060
4061
c7e327c4
S
4062def bool_or_none(v, default=None):
4063 return v if isinstance(v, bool) else default
4064
4065
53cd37ba
S
4066def strip_or_none(v, default=None):
4067 return v.strip() if isinstance(v, compat_str) else default
b72b4431
S
4068
4069
af03000a
S
4070def url_or_none(url):
4071 if not url or not isinstance(url, compat_str):
4072 return None
4073 url = url.strip()
29f7c58a 4074 return url if re.match(r'^(?:(?:https?|rt(?:m(?:pt?[es]?|fp)|sp[su]?)|mms|ftps?):)?//', url) else None
af03000a
S
4075
4076
e29663c6 4077def strftime_or_none(timestamp, date_format, default=None):
4078 datetime_object = None
4079 try:
4080 if isinstance(timestamp, compat_numeric_types): # unix timestamp
4081 datetime_object = datetime.datetime.utcfromtimestamp(timestamp)
4082 elif isinstance(timestamp, compat_str): # assume YYYYMMDD
4083 datetime_object = datetime.datetime.strptime(timestamp, '%Y%m%d')
4084 return datetime_object.strftime(date_format)
4085 except (ValueError, TypeError, AttributeError):
4086 return default
4087
4088
608d11f5 4089def parse_duration(s):
8f9312c3 4090 if not isinstance(s, compat_basestring):
608d11f5 4091 return None
ca7b3246 4092 s = s.strip()
38d79fd1 4093 if not s:
4094 return None
ca7b3246 4095
acaff495 4096 days, hours, mins, secs, ms = [None] * 5
15846398 4097 m = re.match(r'(?:(?:(?:(?P<days>[0-9]+):)?(?P<hours>[0-9]+):)?(?P<mins>[0-9]+):)?(?P<secs>[0-9]+)(?P<ms>\.[0-9]+)?Z?$', s)
acaff495 4098 if m:
4099 days, hours, mins, secs, ms = m.groups()
4100 else:
4101 m = re.match(
056653bb
S
4102 r'''(?ix)(?:P?
4103 (?:
4104 [0-9]+\s*y(?:ears?)?\s*
4105 )?
4106 (?:
4107 [0-9]+\s*m(?:onths?)?\s*
4108 )?
4109 (?:
4110 [0-9]+\s*w(?:eeks?)?\s*
4111 )?
8f4b58d7 4112 (?:
acaff495 4113 (?P<days>[0-9]+)\s*d(?:ays?)?\s*
8f4b58d7 4114 )?
056653bb 4115 T)?
acaff495 4116 (?:
4117 (?P<hours>[0-9]+)\s*h(?:ours?)?\s*
4118 )?
4119 (?:
4120 (?P<mins>[0-9]+)\s*m(?:in(?:ute)?s?)?\s*
4121 )?
4122 (?:
4123 (?P<secs>[0-9]+)(?P<ms>\.[0-9]+)?\s*s(?:ec(?:ond)?s?)?\s*
15846398 4124 )?Z?$''', s)
acaff495 4125 if m:
4126 days, hours, mins, secs, ms = m.groups()
4127 else:
15846398 4128 m = re.match(r'(?i)(?:(?P<hours>[0-9.]+)\s*(?:hours?)|(?P<mins>[0-9.]+)\s*(?:mins?\.?|minutes?)\s*)Z?$', s)
acaff495 4129 if m:
4130 hours, mins = m.groups()
4131 else:
4132 return None
4133
4134 duration = 0
4135 if secs:
4136 duration += float(secs)
4137 if mins:
4138 duration += float(mins) * 60
4139 if hours:
4140 duration += float(hours) * 60 * 60
4141 if days:
4142 duration += float(days) * 24 * 60 * 60
4143 if ms:
4144 duration += float(ms)
4145 return duration
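# Illustrative usage (doctest-style sketch; accepts clock-style, free-form and
# ISO 8601-like duration strings):
#     >>> parse_duration('1:02:03')
#     3723.0
#     >>> parse_duration('5 min')
#     300.0
#     >>> parse_duration('PT1H30M')
#     5400.0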
91d7d0b3
JMF
4146
4147
e65e4c88 4148def prepend_extension(filename, ext, expected_real_ext=None):
5f6a1245 4149 name, real_ext = os.path.splitext(filename)
e65e4c88
S
4150 return (
4151 '{0}.{1}{2}'.format(name, ext, real_ext)
4152 if not expected_real_ext or real_ext[1:] == expected_real_ext
4153 else '{0}.{1}'.format(filename, ext))
d70ad093
PH
4154
4155
b3ed15b7
S
4156def replace_extension(filename, ext, expected_real_ext=None):
4157 name, real_ext = os.path.splitext(filename)
4158 return '{0}.{1}'.format(
4159 name if not expected_real_ext or real_ext[1:] == expected_real_ext else filename,
4160 ext)
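# Illustrative usage (doctest-style sketch; 'video.mp4' is a placeholder filename):
#     >>> prepend_extension('video.mp4', 'temp')
#     'video.temp.mp4'
#     >>> replace_extension('video.mp4', 'webm')
#     'video.webm'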
4161
4162
d70ad093
PH
4163def check_executable(exe, args=[]):
4164 """ Checks if the given binary is installed somewhere in PATH, and returns its name.
4165 args can be a list of arguments for a short output (like -version) """
4166 try:
d3c93ec2 4167 Popen([exe] + args, stdout=subprocess.PIPE, stderr=subprocess.PIPE).communicate_or_kill()
d70ad093
PH
4168 except OSError:
4169 return False
4170 return exe
b7ab0590
PH
4171
4172
9af98e17 4173def _get_exe_version_output(exe, args):
95807118 4174 try:
b64d04c1 4175 # STDIN should be redirected too. On UNIX-like systems, ffmpeg triggers
7a5c1cfe 4176 # SIGTTOU if yt-dlp is run in the background.
067aa17e 4177 # See https://github.com/ytdl-org/youtube-dl/issues/955#issuecomment-209789656
d3c93ec2 4178 out, _ = Popen(
4179 [encodeArgument(exe)] + args, stdin=subprocess.PIPE,
4180 stdout=subprocess.PIPE, stderr=subprocess.STDOUT).communicate_or_kill()
95807118
PH
4181 except OSError:
4182 return False
cae97f65
PH
4183 if isinstance(out, bytes): # Python 2.x
4184 out = out.decode('ascii', 'ignore')
9af98e17 4185 return out
cae97f65
PH
4186
4187
4188def detect_exe_version(output, version_re=None, unrecognized='present'):
4189 assert isinstance(output, compat_str)
4190 if version_re is None:
4191 version_re = r'version\s+([-0-9._a-zA-Z]+)'
4192 m = re.search(version_re, output)
95807118
PH
4193 if m:
4194 return m.group(1)
4195 else:
4196 return unrecognized
4197
4198
9af98e17 4199def get_exe_version(exe, args=['--version'],
4200 version_re=None, unrecognized='present'):
4201 """ Returns the version of the specified executable,
4202 or False if the executable is not present """
4203 out = _get_exe_version_output(exe, args)
4204 return detect_exe_version(out, version_re, unrecognized) if out else False
4205
4206
cb89cfc1 4207class LazyList(collections.abc.Sequence):
483336e7 4208 ''' Lazy immutable list from an iterable
4209 Note that slices of a LazyList are lists and not LazyList'''
4210
8e5fecc8 4211 class IndexError(IndexError):
4212 pass
4213
282f5709 4214 def __init__(self, iterable, *, reverse=False, _cache=None):
483336e7 4215 self.__iterable = iter(iterable)
282f5709 4216 self.__cache = [] if _cache is None else _cache
4217 self.__reversed = reverse
483336e7 4218
4219 def __iter__(self):
28419ca2 4220 if self.__reversed:
4221 # We need to consume the entire iterable to iterate in reverse
981052c9 4222 yield from self.exhaust()
28419ca2 4223 return
4224 yield from self.__cache
483336e7 4225 for item in self.__iterable:
4226 self.__cache.append(item)
4227 yield item
4228
981052c9 4229 def __exhaust(self):
483336e7 4230 self.__cache.extend(self.__iterable)
9f1a1c36 4231 # Discard the emptied iterable to make it pickle-able
4232 self.__iterable = []
28419ca2 4233 return self.__cache
4234
981052c9 4235 def exhaust(self):
4236 ''' Evaluate the entire iterable '''
4237 return self.__exhaust()[::-1 if self.__reversed else 1]
4238
28419ca2 4239 @staticmethod
981052c9 4240 def __reverse_index(x):
e0f2b4b4 4241 return None if x is None else -(x + 1)
483336e7 4242
4243 def __getitem__(self, idx):
4244 if isinstance(idx, slice):
28419ca2 4245 if self.__reversed:
e0f2b4b4 4246 idx = slice(self.__reverse_index(idx.start), self.__reverse_index(idx.stop), -(idx.step or 1))
4247 start, stop, step = idx.start, idx.stop, idx.step or 1
483336e7 4248 elif isinstance(idx, int):
28419ca2 4249 if self.__reversed:
981052c9 4250 idx = self.__reverse_index(idx)
e0f2b4b4 4251 start, stop, step = idx, idx, 0
483336e7 4252 else:
4253 raise TypeError('indices must be integers or slices')
e0f2b4b4 4254 if ((start or 0) < 0 or (stop or 0) < 0
4255 or (start is None and step < 0)
4256 or (stop is None and step > 0)):
483336e7 4257 # We need to consume the entire iterable to be able to slice from the end
4258 # Obviously, never use this with infinite iterables
8e5fecc8 4259 self.__exhaust()
4260 try:
4261 return self.__cache[idx]
4262 except IndexError as e:
4263 raise self.IndexError(e) from e
e0f2b4b4 4264 n = max(start or 0, stop or 0) - len(self.__cache) + 1
28419ca2 4265 if n > 0:
4266 self.__cache.extend(itertools.islice(self.__iterable, n))
8e5fecc8 4267 try:
4268 return self.__cache[idx]
4269 except IndexError as e:
4270 raise self.IndexError(e) from e
483336e7 4271
4272 def __bool__(self):
4273 try:
28419ca2 4274 self[-1] if self.__reversed else self[0]
8e5fecc8 4275 except self.IndexError:
483336e7 4276 return False
4277 return True
4278
4279 def __len__(self):
8e5fecc8 4280 self.__exhaust()
483336e7 4281 return len(self.__cache)
4282
282f5709 4283 def __reversed__(self):
4284 return type(self)(self.__iterable, reverse=not self.__reversed, _cache=self.__cache)
4285
4286 def __copy__(self):
4287 return type(self)(self.__iterable, reverse=self.__reversed, _cache=self.__cache)
4288
28419ca2 4289 def __repr__(self):
4290 # repr and str should mimic a list. So we exhaust the iterable
4291 return repr(self.exhaust())
4292
4293 def __str__(self):
4294 return repr(self.exhaust())
4295
483336e7 4296
7be9ccff 4297class PagedList:
c07a39ae 4298
4299 class IndexError(IndexError):
4300 pass
4301
dd26ced1
PH
4302 def __len__(self):
4303 # This is only useful for tests
4304 return len(self.getslice())
4305
7be9ccff 4306 def __init__(self, pagefunc, pagesize, use_cache=True):
4307 self._pagefunc = pagefunc
4308 self._pagesize = pagesize
4309 self._use_cache = use_cache
4310 self._cache = {}
4311
4312 def getpage(self, pagenum):
d8cf8d97 4313 page_results = self._cache.get(pagenum)
4314 if page_results is None:
4315 page_results = list(self._pagefunc(pagenum))
7be9ccff 4316 if self._use_cache:
4317 self._cache[pagenum] = page_results
4318 return page_results
4319
4320 def getslice(self, start=0, end=None):
4321 return list(self._getslice(start, end))
4322
4323 def _getslice(self, start, end):
55575225 4324 raise NotImplementedError('This method must be implemented by subclasses')
4325
4326 def __getitem__(self, idx):
7be9ccff 4327 # NOTE: cache must be enabled if this is used
55575225 4328 if not isinstance(idx, int) or idx < 0:
4329 raise TypeError('indices must be non-negative integers')
4330 entries = self.getslice(idx, idx + 1)
d8cf8d97 4331 if not entries:
c07a39ae 4332 raise self.IndexError()
d8cf8d97 4333 return entries[0]
55575225 4334
9c44d242
PH
4335
4336class OnDemandPagedList(PagedList):
7be9ccff 4337 def _getslice(self, start, end):
b7ab0590
PH
4338 for pagenum in itertools.count(start // self._pagesize):
4339 firstid = pagenum * self._pagesize
4340 nextfirstid = pagenum * self._pagesize + self._pagesize
4341 if start >= nextfirstid:
4342 continue
4343
b7ab0590
PH
4344 startv = (
4345 start % self._pagesize
4346 if firstid <= start < nextfirstid
4347 else 0)
b7ab0590
PH
4348 endv = (
4349 ((end - 1) % self._pagesize) + 1
4350 if (end is not None and firstid <= end <= nextfirstid)
4351 else None)
4352
7be9ccff 4353 page_results = self.getpage(pagenum)
b7ab0590
PH
4354 if startv != 0 or endv is not None:
4355 page_results = page_results[startv:endv]
7be9ccff 4356 yield from page_results
b7ab0590
PH
4357
4358 # A little optimization - if the current page is not "full", i.e. does
4359 # not contain page_size videos, then we can assume that this page
4360 # is the last one - there are no more ids on further pages -
4361 # i.e. no need to query again.
4362 if len(page_results) + startv < self._pagesize:
4363 break
4364
4365 # If we got the whole page, but the next page is not interesting,
4366 # break out early as well
4367 if end == nextfirstid:
4368 break
81c2f20b
PH
4369
4370
9c44d242
PH
4371class InAdvancePagedList(PagedList):
4372 def __init__(self, pagefunc, pagecount, pagesize):
9c44d242 4373 self._pagecount = pagecount
7be9ccff 4374 PagedList.__init__(self, pagefunc, pagesize, True)
9c44d242 4375
7be9ccff 4376 def _getslice(self, start, end):
9c44d242
PH
4377 start_page = start // self._pagesize
4378 end_page = (
4379 self._pagecount if end is None else (end // self._pagesize + 1))
4380 skip_elems = start - start_page * self._pagesize
4381 only_more = None if end is None else end - start
4382 for pagenum in range(start_page, end_page):
7be9ccff 4383 page_results = self.getpage(pagenum)
9c44d242 4384 if skip_elems:
7be9ccff 4385 page_results = page_results[skip_elems:]
9c44d242
PH
4386 skip_elems = None
4387 if only_more is not None:
7be9ccff 4388 if len(page_results) < only_more:
4389 only_more -= len(page_results)
9c44d242 4390 else:
7be9ccff 4391 yield from page_results[:only_more]
9c44d242 4392 break
7be9ccff 4393 yield from page_results
9c44d242
PH
4394
4395
81c2f20b 4396def uppercase_escape(s):
676eb3f2 4397 unicode_escape = codecs.getdecoder('unicode_escape')
81c2f20b 4398 return re.sub(
a612753d 4399 r'\\U[0-9a-fA-F]{8}',
676eb3f2
PH
4400 lambda m: unicode_escape(m.group(0))[0],
4401 s)
0fe2ff78
YCH
4402
4403
4404def lowercase_escape(s):
4405 unicode_escape = codecs.getdecoder('unicode_escape')
4406 return re.sub(
4407 r'\\u[0-9a-fA-F]{4}',
4408 lambda m: unicode_escape(m.group(0))[0],
4409 s)
b53466e1 4410
d05cfe06
S
4411
4412def escape_rfc3986(s):
4413 """Escape non-ASCII characters as suggested by RFC 3986"""
8f9312c3 4414 if sys.version_info < (3, 0) and isinstance(s, compat_str):
d05cfe06 4415 s = s.encode('utf-8')
ecc0c5ee 4416 return compat_urllib_parse.quote(s, b"%/;:@&=+$,!~*'()?#[]")
d05cfe06
S
4417
4418
4419def escape_url(url):
4420 """Escape URL as suggested by RFC 3986"""
4421 url_parsed = compat_urllib_parse_urlparse(url)
4422 return url_parsed._replace(
efbed08d 4423 netloc=url_parsed.netloc.encode('idna').decode('ascii'),
d05cfe06
S
4424 path=escape_rfc3986(url_parsed.path),
4425 params=escape_rfc3986(url_parsed.params),
4426 query=escape_rfc3986(url_parsed.query),
4427 fragment=escape_rfc3986(url_parsed.fragment)
4428 ).geturl()
4429
62e609ab 4430
4dfbf869 4431def parse_qs(url):
4432 return compat_parse_qs(compat_urllib_parse_urlparse(url).query)
4433
4434
62e609ab
PH
4435def read_batch_urls(batch_fd):
4436 def fixup(url):
4437 if not isinstance(url, compat_str):
4438 url = url.decode('utf-8', 'replace')
8c04f0be 4439 BOM_UTF8 = ('\xef\xbb\xbf', '\ufeff')
4440 for bom in BOM_UTF8:
4441 if url.startswith(bom):
4442 url = url[len(bom):]
4443 url = url.lstrip()
4444 if not url or url.startswith(('#', ';', ']')):
62e609ab 4445 return False
8c04f0be 4446 # "#" cannot be stripped out since it is part of the URI
4447 # However, it can be safely stripped out if following whitespace
4448 return re.split(r'\s#', url, 1)[0].rstrip()
62e609ab
PH
4449
4450 with contextlib.closing(batch_fd) as fd:
4451 return [url for url in map(fixup, fd) if url]
b74fa8cd
JMF
4452
4453
4454def urlencode_postdata(*args, **kargs):
15707c7e 4455 return compat_urllib_parse_urlencode(*args, **kargs).encode('ascii')
bcf89ce6
PH
4456
4457
38f9ef31 4458def update_url_query(url, query):
cacd9966
YCH
4459 if not query:
4460 return url
38f9ef31 4461 parsed_url = compat_urlparse.urlparse(url)
4462 qs = compat_parse_qs(parsed_url.query)
4463 qs.update(query)
4464 return compat_urlparse.urlunparse(parsed_url._replace(
15707c7e 4465 query=compat_urllib_parse_urlencode(qs, True)))
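# Illustrative usage (doctest-style sketch; example.com is a placeholder):
#     >>> update_url_query('https://example.com/path?a=1', {'b': '2'})
#     'https://example.com/path?a=1&b=2'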
16392824 4466
8e60dc75 4467
ed0291d1
S
4468def update_Request(req, url=None, data=None, headers={}, query={}):
4469 req_headers = req.headers.copy()
4470 req_headers.update(headers)
4471 req_data = data or req.data
4472 req_url = update_url_query(url or req.get_full_url(), query)
95cf60e8
S
4473 req_get_method = req.get_method()
4474 if req_get_method == 'HEAD':
4475 req_type = HEADRequest
4476 elif req_get_method == 'PUT':
4477 req_type = PUTRequest
4478 else:
4479 req_type = compat_urllib_request.Request
ed0291d1
S
4480 new_req = req_type(
4481 req_url, data=req_data, headers=req_headers,
4482 origin_req_host=req.origin_req_host, unverifiable=req.unverifiable)
4483 if hasattr(req, 'timeout'):
4484 new_req.timeout = req.timeout
4485 return new_req
4486
4487
10c87c15 4488def _multipart_encode_impl(data, boundary):
0c265486
YCH
4489 content_type = 'multipart/form-data; boundary=%s' % boundary
4490
4491 out = b''
4492 for k, v in data.items():
4493 out += b'--' + boundary.encode('ascii') + b'\r\n'
4494 if isinstance(k, compat_str):
4495 k = k.encode('utf-8')
4496 if isinstance(v, compat_str):
4497 v = v.encode('utf-8')
4498 # RFC 2047 requires non-ASCII field names to be encoded, while RFC 7578
4499 # suggests sending UTF-8 directly. Firefox sends UTF-8, too
b2ad479d 4500 content = b'Content-Disposition: form-data; name="' + k + b'"\r\n\r\n' + v + b'\r\n'
0c265486
YCH
4501 if boundary.encode('ascii') in content:
4502 raise ValueError('Boundary overlaps with data')
4503 out += content
4504
4505 out += b'--' + boundary.encode('ascii') + b'--\r\n'
4506
4507 return out, content_type
4508
4509
4510def multipart_encode(data, boundary=None):
4511 '''
4512 Encode a dict to RFC 7578-compliant form-data
4513
4514 data:
4515 A dict where keys and values can be either Unicode or bytes-like
4516 objects.
4517 boundary:
4518 If specified, it must be a Unicode object and is used as the boundary. Otherwise
4519 a random boundary is generated.
4520
4521 Reference: https://tools.ietf.org/html/rfc7578
4522 '''
4523 has_specified_boundary = boundary is not None
4524
4525 while True:
4526 if boundary is None:
4527 boundary = '---------------' + str(random.randrange(0x0fffffff, 0xffffffff))
4528
4529 try:
10c87c15 4530 out, content_type = _multipart_encode_impl(data, boundary)
0c265486
YCH
4531 break
4532 except ValueError:
4533 if has_specified_boundary:
4534 raise
4535 boundary = None
4536
4537 return out, content_type
4538
4539
86296ad2 4540def dict_get(d, key_or_keys, default=None, skip_false_values=True):
cbecc9b9
S
4541 if isinstance(key_or_keys, (list, tuple)):
4542 for key in key_or_keys:
86296ad2
S
4543 if key not in d or d[key] is None or skip_false_values and not d[key]:
4544 continue
4545 return d[key]
cbecc9b9
S
4546 return default
4547 return d.get(key_or_keys, default)
4548
4549
329ca3be 4550def try_get(src, getter, expected_type=None):
6606817a 4551 for get in variadic(getter):
a32a9a7e
S
4552 try:
4553 v = get(src)
4554 except (AttributeError, KeyError, TypeError, IndexError):
4555 pass
4556 else:
4557 if expected_type is None or isinstance(v, expected_type):
4558 return v
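# Illustrative usage (doctest-style sketch): the getter is applied and its result
# is returned only if it matches expected_type.
#     >>> try_get({'a': [1, 2]}, lambda x: x['a'][0], int)
#     1
#     >>> try_get({}, lambda x: x['a'][0], int) is None
#     True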
329ca3be
S
4559
4560
6cc62232
S
4561def merge_dicts(*dicts):
4562 merged = {}
4563 for a_dict in dicts:
4564 for k, v in a_dict.items():
4565 if v is None:
4566 continue
3089bc74
S
4567 if (k not in merged
4568 or (isinstance(v, compat_str) and v
4569 and isinstance(merged[k], compat_str)
4570 and not merged[k])):
6cc62232
S
4571 merged[k] = v
4572 return merged
4573
4574
8e60dc75
S
4575def encode_compat_str(string, encoding=preferredencoding(), errors='strict'):
4576 return string if isinstance(string, compat_str) else compat_str(string, encoding, errors)
4577
16392824 4578
a1a530b0
PH
4579US_RATINGS = {
4580 'G': 0,
4581 'PG': 10,
4582 'PG-13': 13,
4583 'R': 16,
4584 'NC': 18,
4585}
fac55558
PH
4586
4587
a8795327 4588TV_PARENTAL_GUIDELINES = {
5a16c9d9
RA
4589 'TV-Y': 0,
4590 'TV-Y7': 7,
4591 'TV-G': 0,
4592 'TV-PG': 0,
4593 'TV-14': 14,
4594 'TV-MA': 17,
a8795327
S
4595}
4596
4597
146c80e2 4598def parse_age_limit(s):
a8795327
S
4599 if type(s) == int:
4600 return s if 0 <= s <= 21 else None
4601 if not isinstance(s, compat_basestring):
d838b1bd 4602 return None
146c80e2 4603 m = re.match(r'^(?P<age>\d{1,2})\+?$', s)
a8795327
S
4604 if m:
4605 return int(m.group('age'))
5c5fae6d 4606 s = s.upper()
a8795327
S
4607 if s in US_RATINGS:
4608 return US_RATINGS[s]
5a16c9d9 4609 m = re.match(r'^TV[_-]?(%s)$' % '|'.join(k[3:] for k in TV_PARENTAL_GUIDELINES), s)
b8361187 4610 if m:
5a16c9d9 4611 return TV_PARENTAL_GUIDELINES['TV-' + m.group(1)]
b8361187 4612 return None
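# Illustrative usage (doctest-style sketch):
#     >>> parse_age_limit('PG-13')
#     13
#     >>> parse_age_limit('TV-MA')
#     17
#     >>> parse_age_limit('18+')
#     18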
146c80e2
S
4613
4614
fac55558 4615def strip_jsonp(code):
609a61e3 4616 return re.sub(
5552c9eb 4617 r'''(?sx)^
e9c671d5 4618 (?:window\.)?(?P<func_name>[a-zA-Z0-9_.$]*)
5552c9eb
YCH
4619 (?:\s*&&\s*(?P=func_name))?
4620 \s*\(\s*(?P<callback_data>.*)\);?
4621 \s*?(?://[^\n]*)*$''',
4622 r'\g<callback_data>', code)
478c2c61
PH
4623
4624
5c610515 4625def js_to_json(code, vars={}):
4626 # vars is a dict of var, val pairs to substitute
c843e685 4627 COMMENT_RE = r'/\*(?:(?!\*/).)*?\*/|//[^\n]*\n'
4195096e
S
4628 SKIP_RE = r'\s*(?:{comment})?\s*'.format(comment=COMMENT_RE)
4629 INTEGER_TABLE = (
4630 (r'(?s)^(0[xX][0-9a-fA-F]+){skip}:?$'.format(skip=SKIP_RE), 16),
4631 (r'(?s)^(0+[0-7]+){skip}:?$'.format(skip=SKIP_RE), 8),
4632 )
4633
e05f6939 4634 def fix_kv(m):
e7b6d122
PH
4635 v = m.group(0)
4636 if v in ('true', 'false', 'null'):
4637 return v
421ddcb8
C
4638 elif v in ('undefined', 'void 0'):
4639 return 'null'
8bdd16b4 4640 elif v.startswith('/*') or v.startswith('//') or v.startswith('!') or v == ',':
bd1e4844 4641 return ""
4642
4643 if v[0] in ("'", '"'):
4644 v = re.sub(r'(?s)\\.|"', lambda m: {
e7b6d122 4645 '"': '\\"',
bd1e4844 4646 "\\'": "'",
4647 '\\\n': '',
4648 '\\x': '\\u00',
4649 }.get(m.group(0), m.group(0)), v[1:-1])
8bdd16b4 4650 else:
4651 for regex, base in INTEGER_TABLE:
4652 im = re.match(regex, v)
4653 if im:
4654 i = int(im.group(1), base)
4655 return '"%d":' % i if v.endswith(':') else '%d' % i
89ac4a19 4656
5c610515 4657 if v in vars:
4658 return vars[v]
4659
e7b6d122 4660 return '"%s"' % v
e05f6939 4661
bd1e4844 4662 return re.sub(r'''(?sx)
4663 "(?:[^"\\]*(?:\\\\|\\['"nurtbfx/\n]))*[^"\\]*"|
4664 '(?:[^'\\]*(?:\\\\|\\['"nurtbfx/\n]))*[^'\\]*'|
4195096e 4665 {comment}|,(?={skip}[\]}}])|
421ddcb8 4666 void\s0|(?:(?<![0-9])[eE]|[a-df-zA-DF-Z_$])[.a-zA-Z_$0-9]*|
4195096e 4667 \b(?:0[xX][0-9a-fA-F]+|0+[0-7]+)(?:{skip}:)?|
8bdd16b4 4668 [0-9]+(?={skip}:)|
4669 !+
4195096e 4670 '''.format(comment=COMMENT_RE, skip=SKIP_RE), fix_kv, code)
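# Illustrative usage (doctest-style sketch): unquoted keys, single quotes and
# hex literals are converted so the result parses as JSON.
#     >>> js_to_json("{foo: 'bar', baz: 0x10}")
#     '{"foo": "bar", "baz": 16}'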
e05f6939
PH
4671
4672
478c2c61
PH
4673def qualities(quality_ids):
4674 """ Get a numeric quality value out of a list of possible values """
4675 def q(qid):
4676 try:
4677 return quality_ids.index(qid)
4678 except ValueError:
4679 return -1
4680 return q
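# Illustrative usage (doctest-style sketch): known ids map to their index,
# unknown ids map to -1.
#     >>> q = qualities(['240p', '720p', '1080p'])
#     >>> q('720p'), q('480p')
#     (1, -1)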
4681
acd69589 4682
ebed8b37 4683POSTPROCESS_WHEN = {'pre_process', 'before_dl', 'after_move', 'post_process', 'after_video', 'playlist'}
1e43a6f7 4684
4685
de6000d9 4686DEFAULT_OUTTMPL = {
4687 'default': '%(title)s [%(id)s].%(ext)s',
72755351 4688 'chapter': '%(title)s - %(section_number)03d %(section_title)s [%(id)s].%(ext)s',
de6000d9 4689}
4690OUTTMPL_TYPES = {
72755351 4691 'chapter': None,
de6000d9 4692 'subtitle': None,
4693 'thumbnail': None,
4694 'description': 'description',
4695 'annotation': 'annotations.xml',
4696 'infojson': 'info.json',
08438d2c 4697 'link': None,
3b603dbd 4698 'pl_video': None,
5112f26a 4699 'pl_thumbnail': None,
de6000d9 4700 'pl_description': 'description',
4701 'pl_infojson': 'info.json',
4702}
0a871f68 4703
143db31d 4704# As of [1] format syntax is:
4705# %[mapping_key][conversion_flags][minimum_width][.precision][length_modifier]type
4706# 1. https://docs.python.org/2/library/stdtypes.html#string-formatting
901130bb 4707STR_FORMAT_RE_TMPL = r'''(?x)
4708 (?<!%)(?P<prefix>(?:%%)*)
143db31d 4709 %
524e2e4f 4710 (?P<has_key>\((?P<key>{0})\))?
752cda38 4711 (?P<format>
524e2e4f 4712 (?P<conversion>[#0\-+ ]+)?
4713 (?P<min_width>\d+)?
4714 (?P<precision>\.\d+)?
4715 (?P<len_mod>[hlL])? # unused in python
901130bb 4716 {1} # conversion type
752cda38 4717 )
143db31d 4718'''
4719
7d1eb38a 4720
901130bb 4721STR_FORMAT_TYPES = 'diouxXeEfFgGcrs'
a020a0dc 4722
7d1eb38a 4723
a020a0dc
PH
4724def limit_length(s, length):
4725 """ Add ellipses to overly long strings """
4726 if s is None:
4727 return None
4728 ELLIPSES = '...'
4729 if len(s) > length:
4730 return s[:length - len(ELLIPSES)] + ELLIPSES
4731 return s
48844745
PH
4732
4733
4734def version_tuple(v):
5f9b8394 4735 return tuple(int(e) for e in re.split(r'[-.]', v))
48844745
PH
4736
4737
4738def is_outdated_version(version, limit, assume_new=True):
4739 if not version:
4740 return not assume_new
4741 try:
4742 return version_tuple(version) < version_tuple(limit)
4743 except ValueError:
4744 return not assume_new
732ea2f0
PH
4745
4746
4747def ytdl_is_updateable():
7a5c1cfe 4748 """ Returns if yt-dlp can be updated with -U """
735d865e 4749
5d535b4a 4750 from .update import is_non_updateable
732ea2f0 4751
5d535b4a 4752 return not is_non_updateable()
7d4111ed
PH
4753
4754
4755def args_to_str(args):
4756 # Get a short string representation for a subprocess command
702ccf2d 4757 return ' '.join(compat_shlex_quote(a) for a in args)
2ccd1b10
PH
4758
4759
9b9c5355 4760def error_to_compat_str(err):
fdae2358
S
4761 err_str = str(err)
4762 # On python 2 error byte string must be decoded with proper
4763 # encoding rather than ascii
4764 if sys.version_info[0] < 3:
4765 err_str = err_str.decode(preferredencoding())
4766 return err_str
4767
4768
c460bdd5 4769def mimetype2ext(mt):
eb9ee194
S
4770 if mt is None:
4771 return None
4772
9359f3d4
F
4773 mt, _, params = mt.partition(';')
4774 mt = mt.strip()
4775
4776 FULL_MAP = {
765ac263 4777 'audio/mp4': 'm4a',
6c33d24b
YCH
4778 # Per RFC 3003, audio/mpeg can be .mp1, .mp2 or .mp3. Here use .mp3 as
4779 # it's the most popular one
4780 'audio/mpeg': 'mp3',
ba39289d 4781 'audio/x-wav': 'wav',
9359f3d4
F
4782 'audio/wav': 'wav',
4783 'audio/wave': 'wav',
4784 }
4785
4786 ext = FULL_MAP.get(mt)
765ac263
JMF
4787 if ext is not None:
4788 return ext
4789
9359f3d4 4790 SUBTYPE_MAP = {
f6861ec9 4791 '3gpp': '3gp',
cafcf657 4792 'smptett+xml': 'tt',
cafcf657 4793 'ttaf+xml': 'dfxp',
a0d8d704 4794 'ttml+xml': 'ttml',
f6861ec9 4795 'x-flv': 'flv',
a0d8d704 4796 'x-mp4-fragmented': 'mp4',
d4f05d47 4797 'x-ms-sami': 'sami',
a0d8d704 4798 'x-ms-wmv': 'wmv',
b4173f15
RA
4799 'mpegurl': 'm3u8',
4800 'x-mpegurl': 'm3u8',
4801 'vnd.apple.mpegurl': 'm3u8',
4802 'dash+xml': 'mpd',
b4173f15 4803 'f4m+xml': 'f4m',
f164b971 4804 'hds+xml': 'f4m',
e910fe2f 4805 'vnd.ms-sstr+xml': 'ism',
c2b2c7e1 4806 'quicktime': 'mov',
98ce1a3f 4807 'mp2t': 'ts',
39e7107d 4808 'x-wav': 'wav',
9359f3d4
F
4809 'filmstrip+json': 'fs',
4810 'svg+xml': 'svg',
4811 }
4812
4813 _, _, subtype = mt.rpartition('/')
4814 ext = SUBTYPE_MAP.get(subtype.lower())
4815 if ext is not None:
4816 return ext
4817
4818 SUFFIX_MAP = {
4819 'json': 'json',
4820 'xml': 'xml',
4821 'zip': 'zip',
4822 'gzip': 'gz',
4823 }
4824
4825 _, _, suffix = subtype.partition('+')
4826 ext = SUFFIX_MAP.get(suffix)
4827 if ext is not None:
4828 return ext
4829
4830 return subtype.replace('+', '.')
c460bdd5
PH
4831
4832
2814f12b
THD
4833def ext2mimetype(ext_or_url):
4834 if not ext_or_url:
4835 return None
4836 if '.' not in ext_or_url:
4837 ext_or_url = f'file.{ext_or_url}'
4838 return mimetypes.guess_type(ext_or_url)[0]
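# Illustrative usage (doctest-style sketch; the last value depends on the
# platform's mimetypes registry):
#     >>> mimetype2ext('audio/mp4')
#     'm4a'
#     >>> mimetype2ext('application/x-mpegURL')
#     'm3u8'
#     >>> ext2mimetype('mp4')
#     'video/mp4'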
4839
4840
4f3c5e06 4841def parse_codecs(codecs_str):
4842 # http://tools.ietf.org/html/rfc6381
4843 if not codecs_str:
4844 return {}
a0566bbf 4845 split_codecs = list(filter(None, map(
dbf5416a 4846 str.strip, codecs_str.strip().strip(',').split(','))))
4afa3ec4 4847 vcodec, acodec, tcodec, hdr = None, None, None, None
a0566bbf 4848 for full_codec in split_codecs:
9bd979ca 4849 parts = full_codec.split('.')
4850 codec = parts[0].replace('0', '')
4851 if codec in ('avc1', 'avc2', 'avc3', 'avc4', 'vp9', 'vp8', 'hev1', 'hev2',
4852 'h263', 'h264', 'mp4v', 'hvc1', 'av1', 'theora', 'dvh1', 'dvhe'):
4f3c5e06 4853 if not vcodec:
b69fd25c 4854 vcodec = '.'.join(parts[:4]) if codec in ('vp9', 'av1', 'hvc1') else full_codec
176f1866 4855 if codec in ('dvh1', 'dvhe'):
4856 hdr = 'DV'
9bd979ca 4857 elif codec == 'av1' and len(parts) > 3 and parts[3] == '10':
4858 hdr = 'HDR10'
4859 elif full_codec.replace('0', '').startswith('vp9.2'):
176f1866 4860 hdr = 'HDR10'
b69fd25c 4861 elif codec in ('flac', 'mp4a', 'opus', 'vorbis', 'mp3', 'aac', 'ac-3', 'ec-3', 'eac3', 'dtsc', 'dtse', 'dtsh', 'dtsl'):
4f3c5e06 4862 if not acodec:
4863 acodec = full_codec
4afa3ec4
F
4864 elif codec in ('stpp', 'wvtt',):
4865 if not tcodec:
4866 tcodec = full_codec
4f3c5e06 4867 else:
60f5c9fb 4868 write_string('WARNING: Unknown codec %s\n' % full_codec, sys.stderr)
4afa3ec4 4869 if vcodec or acodec or tcodec:
4f3c5e06 4870 return {
4871 'vcodec': vcodec or 'none',
4872 'acodec': acodec or 'none',
176f1866 4873 'dynamic_range': hdr,
4afa3ec4 4874 **({'tcodec': tcodec} if tcodec is not None else {}),
4f3c5e06 4875 }
b69fd25c 4876 elif len(split_codecs) == 2:
4877 return {
4878 'vcodec': split_codecs[0],
4879 'acodec': split_codecs[1],
4880 }
4f3c5e06 4881 return {}
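# Illustrative usage (doctest-style sketch; the codec string is a typical DASH/HLS value):
#     >>> parse_codecs('avc1.64001f, mp4a.40.2')
#     {'vcodec': 'avc1.64001f', 'acodec': 'mp4a.40.2', 'dynamic_range': None}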
4882
4883
2ccd1b10 4884def urlhandle_detect_ext(url_handle):
79298173 4885 getheader = url_handle.headers.get
2ccd1b10 4886
b55ee18f
PH
4887 cd = getheader('Content-Disposition')
4888 if cd:
4889 m = re.match(r'attachment;\s*filename="(?P<filename>[^"]+)"', cd)
4890 if m:
4891 e = determine_ext(m.group('filename'), default_ext=None)
4892 if e:
4893 return e
4894
c460bdd5 4895 return mimetype2ext(getheader('Content-Type'))
05900629
PH
4896
4897
1e399778
YCH
4898def encode_data_uri(data, mime_type):
4899 return 'data:%s;base64,%s' % (mime_type, base64.b64encode(data).decode('ascii'))
4900
4901
05900629 4902def age_restricted(content_limit, age_limit):
6ec6cb4e 4903 """ Returns True iff the content should be blocked """
05900629
PH
4904
4905 if age_limit is None: # No limit set
4906 return False
4907 if content_limit is None:
4908 return False # Content available for everyone
4909 return age_limit < content_limit
61ca9a80
PH
4910
4911
4912def is_html(first_bytes):
4913 """ Detect whether a file contains HTML by examining its first bytes. """
4914
4915 BOMS = [
4916 (b'\xef\xbb\xbf', 'utf-8'),
4917 (b'\x00\x00\xfe\xff', 'utf-32-be'),
4918 (b'\xff\xfe\x00\x00', 'utf-32-le'),
4919 (b'\xff\xfe', 'utf-16-le'),
4920 (b'\xfe\xff', 'utf-16-be'),
4921 ]
4922 for bom, enc in BOMS:
4923 if first_bytes.startswith(bom):
4924 s = first_bytes[len(bom):].decode(enc, 'replace')
4925 break
4926 else:
4927 s = first_bytes.decode('utf-8', 'replace')
4928
4929 return re.match(r'^\s*<', s)
a055469f
PH
4930
4931
4932def determine_protocol(info_dict):
4933 protocol = info_dict.get('protocol')
4934 if protocol is not None:
4935 return protocol
4936
7de837a5 4937 url = sanitize_url(info_dict['url'])
a055469f
PH
4938 if url.startswith('rtmp'):
4939 return 'rtmp'
4940 elif url.startswith('mms'):
4941 return 'mms'
4942 elif url.startswith('rtsp'):
4943 return 'rtsp'
4944
4945 ext = determine_ext(url)
4946 if ext == 'm3u8':
4947 return 'm3u8'
4948 elif ext == 'f4m':
4949 return 'f4m'
4950
4951 return compat_urllib_parse_urlparse(url).scheme
cfb56d1a
PH
4952
4953
c5e3f849 4954def render_table(header_row, data, delim=False, extra_gap=0, hide_empty=False):
4955 """ Render a list of rows, each as a list of values.
4956 Text after a \t will be right aligned """
ec11a9f4 4957 def width(string):
c5e3f849 4958 return len(remove_terminal_sequences(string).replace('\t', ''))
76d321f6 4959
4960 def get_max_lens(table):
ec11a9f4 4961 return [max(width(str(v)) for v in col) for col in zip(*table)]
76d321f6 4962
4963 def filter_using_list(row, filterArray):
4964 return [col for (take, col) in zip(filterArray, row) if take]
4965
c5e3f849 4966 if hide_empty:
76d321f6 4967 max_lens = get_max_lens(data)
4968 header_row = filter_using_list(header_row, max_lens)
4969 data = [filter_using_list(row, max_lens) for row in data]
4970
cfb56d1a 4971 table = [header_row] + data
76d321f6 4972 max_lens = get_max_lens(table)
c5e3f849 4973 extra_gap += 1
76d321f6 4974 if delim:
c5e3f849 4975 table = [header_row, [delim * (ml + extra_gap) for ml in max_lens]] + data
4976 table[1][-1] = table[1][-1][:-extra_gap] # Remove extra_gap from end of delimiter
ec11a9f4 4977 for row in table:
4978 for pos, text in enumerate(map(str, row)):
c5e3f849 4979 if '\t' in text:
4980 row[pos] = text.replace('\t', ' ' * (max_lens[pos] - width(text))) + ' ' * extra_gap
4981 else:
4982 row[pos] = text + ' ' * (max_lens[pos] - width(text) + extra_gap)
4983 ret = '\n'.join(''.join(row).rstrip() for row in table)
ec11a9f4 4984 return ret
347de493
PH
4985
4986
8f18aca8 4987def _match_one(filter_part, dct, incomplete):
77b87f05 4988 # TODO: Generalize code with YoutubeDL._build_format_filter
a047eeb6 4989 STRING_OPERATORS = {
4990 '*=': operator.contains,
4991 '^=': lambda attr, value: attr.startswith(value),
4992 '$=': lambda attr, value: attr.endswith(value),
4993 '~=': lambda attr, value: re.search(value, attr),
4994 }
347de493 4995 COMPARISON_OPERATORS = {
a047eeb6 4996 **STRING_OPERATORS,
4997 '<=': operator.le, # "<=" must be defined above "<"
347de493 4998 '<': operator.lt,
347de493 4999 '>=': operator.ge,
a047eeb6 5000 '>': operator.gt,
347de493 5001 '=': operator.eq,
347de493 5002 }
a047eeb6 5003
347de493
PH
5004 operator_rex = re.compile(r'''(?x)\s*
5005 (?P<key>[a-z_]+)
77b87f05 5006 \s*(?P<negation>!\s*)?(?P<op>%s)(?P<none_inclusive>\s*\?)?\s*
347de493 5007 (?:
a047eeb6 5008 (?P<quote>["\'])(?P<quotedstrval>.+?)(?P=quote)|
5009 (?P<strval>.+?)
347de493
PH
5010 )
5011 \s*$
5012 ''' % '|'.join(map(re.escape, COMPARISON_OPERATORS.keys())))
5013 m = operator_rex.search(filter_part)
5014 if m:
18f96d12 5015 m = m.groupdict()
5016 unnegated_op = COMPARISON_OPERATORS[m['op']]
5017 if m['negation']:
77b87f05
MT
5018 op = lambda attr, value: not unnegated_op(attr, value)
5019 else:
5020 op = unnegated_op
18f96d12 5021 comparison_value = m['quotedstrval'] or m['strval'] or m['intval']
5022 if m['quote']:
5023 comparison_value = comparison_value.replace(r'\%s' % m['quote'], m['quote'])
5024 actual_value = dct.get(m['key'])
5025 numeric_comparison = None
5026 if isinstance(actual_value, compat_numeric_types):
e5a088dc
S
5027 # If the original field is a string and the matching comparison value is
5028 # a number we should respect the origin of the original field
5029 # and process comparison value as a string (see
18f96d12 5030 # https://github.com/ytdl-org/youtube-dl/issues/11082)
347de493 5031 try:
18f96d12 5032 numeric_comparison = int(comparison_value)
347de493 5033 except ValueError:
18f96d12 5034 numeric_comparison = parse_filesize(comparison_value)
5035 if numeric_comparison is None:
5036 numeric_comparison = parse_filesize(f'{comparison_value}B')
5037 if numeric_comparison is None:
5038 numeric_comparison = parse_duration(comparison_value)
5039 if numeric_comparison is not None and m['op'] in STRING_OPERATORS:
5040 raise ValueError('Operator %s only supports string values!' % m['op'])
347de493 5041 if actual_value is None:
18f96d12 5042 return incomplete or m['none_inclusive']
5043 return op(actual_value, comparison_value if numeric_comparison is None else numeric_comparison)
347de493
PH
5044
5045 UNARY_OPERATORS = {
1cc47c66
S
5046 '': lambda v: (v is True) if isinstance(v, bool) else (v is not None),
5047 '!': lambda v: (v is False) if isinstance(v, bool) else (v is None),
347de493
PH
5048 }
5049 operator_rex = re.compile(r'''(?x)\s*
5050 (?P<op>%s)\s*(?P<key>[a-z_]+)
5051 \s*$
5052 ''' % '|'.join(map(re.escape, UNARY_OPERATORS.keys())))
5053 m = operator_rex.search(filter_part)
5054 if m:
5055 op = UNARY_OPERATORS[m.group('op')]
5056 actual_value = dct.get(m.group('key'))
8f18aca8 5057 if incomplete and actual_value is None:
5058 return True
347de493
PH
5059 return op(actual_value)
5060
5061 raise ValueError('Invalid filter part %r' % filter_part)
5062
5063
8f18aca8 5064def match_str(filter_str, dct, incomplete=False):
5065 """ Filter a dictionary with a simple string syntax. Returns True (=passes filter) or false
5066 When incomplete, all conditions passes on missing fields
5067 """
347de493 5068 return all(
8f18aca8 5069 _match_one(filter_part.replace(r'\&', '&'), dct, incomplete)
a047eeb6 5070 for filter_part in re.split(r'(?<!\\)&', filter_str))
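# Illustrative usage (doctest-style sketch; field names follow the info-dict convention):
#     >>> match_str('like_count > 100 & duration < 600', {'like_count': 250, 'duration': 300})
#     True
#     >>> match_str('is_live', {'is_live': False})
#     False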
347de493
PH
5071
5072
5073def match_filter_func(filter_str):
8f18aca8 5074 def _match_func(info_dict, *args, **kwargs):
5075 if match_str(filter_str, info_dict, *args, **kwargs):
347de493
PH
5076 return None
5077 else:
5078 video_title = info_dict.get('title', info_dict.get('id', 'video'))
5079 return '%s does not pass filter %s, skipping ..' % (video_title, filter_str)
5080 return _match_func
91410c9b
PH
5081
5082
bf6427d2
YCH
5083def parse_dfxp_time_expr(time_expr):
5084 if not time_expr:
d631d5f9 5085 return
bf6427d2
YCH
5086
5087 mobj = re.match(r'^(?P<time_offset>\d+(?:\.\d+)?)s?$', time_expr)
5088 if mobj:
5089 return float(mobj.group('time_offset'))
5090
db2fe38b 5091 mobj = re.match(r'^(\d+):(\d\d):(\d\d(?:(?:\.|:)\d+)?)$', time_expr)
bf6427d2 5092 if mobj:
db2fe38b 5093 return 3600 * int(mobj.group(1)) + 60 * int(mobj.group(2)) + float(mobj.group(3).replace(':', '.'))
bf6427d2
YCH
5094
5095
c1c924ab 5096def srt_subtitles_timecode(seconds):
aa7785f8 5097 return '%02d:%02d:%02d,%03d' % timetuple_from_msec(seconds * 1000)
5098
5099
5100def ass_subtitles_timecode(seconds):
5101 time = timetuple_from_msec(seconds * 1000)
5102 return '%01d:%02d:%02d.%02d' % (*time[:-1], time.milliseconds / 10)
bf6427d2
YCH
5103
5104
5105def dfxp2srt(dfxp_data):
3869028f
YCH
5106 '''
5107 @param dfxp_data A bytes-like object containing DFXP data
5108 @returns A unicode object containing converted SRT data
5109 '''
5b995f71 5110 LEGACY_NAMESPACES = (
3869028f
YCH
5111 (b'http://www.w3.org/ns/ttml', [
5112 b'http://www.w3.org/2004/11/ttaf1',
5113 b'http://www.w3.org/2006/04/ttaf1',
5114 b'http://www.w3.org/2006/10/ttaf1',
5b995f71 5115 ]),
3869028f
YCH
5116 (b'http://www.w3.org/ns/ttml#styling', [
5117 b'http://www.w3.org/ns/ttml#style',
5b995f71
RA
5118 ]),
5119 )
5120
5121 SUPPORTED_STYLING = [
5122 'color',
5123 'fontFamily',
5124 'fontSize',
5125 'fontStyle',
5126 'fontWeight',
5127 'textDecoration'
5128 ]
5129
4e335771 5130 _x = functools.partial(xpath_with_ns, ns_map={
261f4730 5131 'xml': 'http://www.w3.org/XML/1998/namespace',
4e335771 5132 'ttml': 'http://www.w3.org/ns/ttml',
5b995f71 5133 'tts': 'http://www.w3.org/ns/ttml#styling',
4e335771 5134 })
bf6427d2 5135
5b995f71
RA
5136 styles = {}
5137 default_style = {}
5138
87de7069 5139 class TTMLPElementParser(object):
5b995f71
RA
5140 _out = ''
5141 _unclosed_elements = []
5142 _applied_styles = []
bf6427d2 5143
2b14cb56 5144 def start(self, tag, attrib):
5b995f71
RA
5145 if tag in (_x('ttml:br'), 'br'):
5146 self._out += '\n'
5147 else:
5148 unclosed_elements = []
5149 style = {}
5150 element_style_id = attrib.get('style')
5151 if default_style:
5152 style.update(default_style)
5153 if element_style_id:
5154 style.update(styles.get(element_style_id, {}))
5155 for prop in SUPPORTED_STYLING:
5156 prop_val = attrib.get(_x('tts:' + prop))
5157 if prop_val:
5158 style[prop] = prop_val
5159 if style:
5160 font = ''
5161 for k, v in sorted(style.items()):
5162 if self._applied_styles and self._applied_styles[-1].get(k) == v:
5163 continue
5164 if k == 'color':
5165 font += ' color="%s"' % v
5166 elif k == 'fontSize':
5167 font += ' size="%s"' % v
5168 elif k == 'fontFamily':
5169 font += ' face="%s"' % v
5170 elif k == 'fontWeight' and v == 'bold':
5171 self._out += '<b>'
5172 unclosed_elements.append('b')
5173 elif k == 'fontStyle' and v == 'italic':
5174 self._out += '<i>'
5175 unclosed_elements.append('i')
5176 elif k == 'textDecoration' and v == 'underline':
5177 self._out += '<u>'
5178 unclosed_elements.append('u')
5179 if font:
5180 self._out += '<font' + font + '>'
5181 unclosed_elements.append('font')
5182 applied_style = {}
5183 if self._applied_styles:
5184 applied_style.update(self._applied_styles[-1])
5185 applied_style.update(style)
5186 self._applied_styles.append(applied_style)
5187 self._unclosed_elements.append(unclosed_elements)
bf6427d2 5188
2b14cb56 5189 def end(self, tag):
5b995f71
RA
5190 if tag not in (_x('ttml:br'), 'br'):
5191 unclosed_elements = self._unclosed_elements.pop()
5192 for element in reversed(unclosed_elements):
5193 self._out += '</%s>' % element
5194 if unclosed_elements and self._applied_styles:
5195 self._applied_styles.pop()
bf6427d2 5196
2b14cb56 5197 def data(self, data):
5b995f71 5198 self._out += data
2b14cb56 5199
5200 def close(self):
5b995f71 5201 return self._out.strip()
2b14cb56 5202
5203 def parse_node(node):
5204 target = TTMLPElementParser()
5205 parser = xml.etree.ElementTree.XMLParser(target=target)
5206 parser.feed(xml.etree.ElementTree.tostring(node))
5207 return parser.close()
bf6427d2 5208
5b995f71
RA
5209 for k, v in LEGACY_NAMESPACES:
5210 for ns in v:
5211 dfxp_data = dfxp_data.replace(ns, k)
5212
3869028f 5213 dfxp = compat_etree_fromstring(dfxp_data)
bf6427d2 5214 out = []
5b995f71 5215 paras = dfxp.findall(_x('.//ttml:p')) or dfxp.findall('.//p')
1b0427e6
YCH
5216
5217 if not paras:
5218 raise ValueError('Invalid dfxp/TTML subtitle')
bf6427d2 5219
5b995f71
RA
5220 repeat = False
5221 while True:
5222 for style in dfxp.findall(_x('.//ttml:style')):
261f4730
RA
5223 style_id = style.get('id') or style.get(_x('xml:id'))
5224 if not style_id:
5225 continue
5b995f71
RA
5226 parent_style_id = style.get('style')
5227 if parent_style_id:
5228 if parent_style_id not in styles:
5229 repeat = True
5230 continue
5231 styles[style_id] = styles[parent_style_id].copy()
5232 for prop in SUPPORTED_STYLING:
5233 prop_val = style.get(_x('tts:' + prop))
5234 if prop_val:
5235 styles.setdefault(style_id, {})[prop] = prop_val
5236 if repeat:
5237 repeat = False
5238 else:
5239 break
5240
5241 for p in ('body', 'div'):
5242 ele = xpath_element(dfxp, [_x('.//ttml:' + p), './/' + p])
5243 if ele is None:
5244 continue
5245 style = styles.get(ele.get('style'))
5246 if not style:
5247 continue
5248 default_style.update(style)
5249
bf6427d2 5250 for para, index in zip(paras, itertools.count(1)):
d631d5f9 5251 begin_time = parse_dfxp_time_expr(para.attrib.get('begin'))
7dff0363 5252 end_time = parse_dfxp_time_expr(para.attrib.get('end'))
d631d5f9
YCH
5253 dur = parse_dfxp_time_expr(para.attrib.get('dur'))
5254 if begin_time is None:
5255 continue
7dff0363 5256 if not end_time:
d631d5f9
YCH
5257 if not dur:
5258 continue
5259 end_time = begin_time + dur
bf6427d2
YCH
5260 out.append('%d\n%s --> %s\n%s\n\n' % (
5261 index,
c1c924ab
YCH
5262 srt_subtitles_timecode(begin_time),
5263 srt_subtitles_timecode(end_time),
bf6427d2
YCH
5264 parse_node(para)))
5265
5266 return ''.join(out)
5267
5268
66e289ba
S
5269def cli_option(params, command_option, param):
5270 param = params.get(param)
98e698f1
RA
5271 if param:
5272 param = compat_str(param)
66e289ba
S
5273 return [command_option, param] if param is not None else []
5274
5275
5276def cli_bool_option(params, command_option, param, true_value='true', false_value='false', separator=None):
5277 param = params.get(param)
5b232f46
S
5278 if param is None:
5279 return []
66e289ba
S
5280 assert isinstance(param, bool)
5281 if separator:
5282 return [command_option + separator + (true_value if param else false_value)]
5283 return [command_option, true_value if param else false_value]
5284
5285
5286def cli_valueless_option(params, command_option, param, expected_value=True):
5287 param = params.get(param)
5288 return [command_option] if param == expected_value else []
5289
5290
e92caff5 5291def cli_configuration_args(argdict, keys, default=[], use_compat=True):
eab9b2bc 5292 if isinstance(argdict, (list, tuple)): # for backward compatibility
e92caff5 5293 if use_compat:
5b1ecbb3 5294 return argdict
5295 else:
5296 argdict = None
eab9b2bc 5297 if argdict is None:
5b1ecbb3 5298 return default
eab9b2bc 5299 assert isinstance(argdict, dict)
5300
e92caff5 5301 assert isinstance(keys, (list, tuple))
5302 for key_list in keys:
e92caff5 5303 arg_list = list(filter(
5304 lambda x: x is not None,
6606817a 5305 [argdict.get(key.lower()) for key in variadic(key_list)]))
e92caff5 5306 if arg_list:
5307 return [arg for args in arg_list for arg in args]
5308 return default
66e289ba 5309
6251555f 5310
330690a2 5311def _configuration_args(main_key, argdict, exe, keys=None, default=[], use_compat=True):
5312 main_key, exe = main_key.lower(), exe.lower()
5313 root_key = exe if main_key == exe else f'{main_key}+{exe}'
5314 keys = [f'{root_key}{k}' for k in (keys or [''])]
5315 if root_key in keys:
5316 if main_key != exe:
5317 keys.append((main_key, exe))
5318 keys.append('default')
5319 else:
5320 use_compat = False
5321 return cli_configuration_args(argdict, keys, default, use_compat)
5322
66e289ba 5323
39672624
YCH
5324class ISO639Utils(object):
5325 # See http://www.loc.gov/standards/iso639-2/ISO-639-2_utf-8.txt
5326 _lang_map = {
5327 'aa': 'aar',
5328 'ab': 'abk',
5329 'ae': 'ave',
5330 'af': 'afr',
5331 'ak': 'aka',
5332 'am': 'amh',
5333 'an': 'arg',
5334 'ar': 'ara',
5335 'as': 'asm',
5336 'av': 'ava',
5337 'ay': 'aym',
5338 'az': 'aze',
5339 'ba': 'bak',
5340 'be': 'bel',
5341 'bg': 'bul',
5342 'bh': 'bih',
5343 'bi': 'bis',
5344 'bm': 'bam',
5345 'bn': 'ben',
5346 'bo': 'bod',
5347 'br': 'bre',
5348 'bs': 'bos',
5349 'ca': 'cat',
5350 'ce': 'che',
5351 'ch': 'cha',
5352 'co': 'cos',
5353 'cr': 'cre',
5354 'cs': 'ces',
5355 'cu': 'chu',
5356 'cv': 'chv',
5357 'cy': 'cym',
5358 'da': 'dan',
5359 'de': 'deu',
5360 'dv': 'div',
5361 'dz': 'dzo',
5362 'ee': 'ewe',
5363 'el': 'ell',
5364 'en': 'eng',
5365 'eo': 'epo',
5366 'es': 'spa',
5367 'et': 'est',
5368 'eu': 'eus',
5369 'fa': 'fas',
5370 'ff': 'ful',
5371 'fi': 'fin',
5372 'fj': 'fij',
5373 'fo': 'fao',
5374 'fr': 'fra',
5375 'fy': 'fry',
5376 'ga': 'gle',
5377 'gd': 'gla',
5378 'gl': 'glg',
5379 'gn': 'grn',
5380 'gu': 'guj',
5381 'gv': 'glv',
5382 'ha': 'hau',
5383 'he': 'heb',
b7acc835 5384 'iw': 'heb', # Replaced by he in 1989 revision
39672624
YCH
5385 'hi': 'hin',
5386 'ho': 'hmo',
5387 'hr': 'hrv',
5388 'ht': 'hat',
5389 'hu': 'hun',
5390 'hy': 'hye',
5391 'hz': 'her',
5392 'ia': 'ina',
5393 'id': 'ind',
b7acc835 5394 'in': 'ind', # Replaced by id in 1989 revision
39672624
YCH
5395 'ie': 'ile',
5396 'ig': 'ibo',
5397 'ii': 'iii',
5398 'ik': 'ipk',
5399 'io': 'ido',
5400 'is': 'isl',
5401 'it': 'ita',
5402 'iu': 'iku',
5403 'ja': 'jpn',
5404 'jv': 'jav',
5405 'ka': 'kat',
5406 'kg': 'kon',
5407 'ki': 'kik',
5408 'kj': 'kua',
5409 'kk': 'kaz',
5410 'kl': 'kal',
5411 'km': 'khm',
5412 'kn': 'kan',
5413 'ko': 'kor',
5414 'kr': 'kau',
5415 'ks': 'kas',
5416 'ku': 'kur',
5417 'kv': 'kom',
5418 'kw': 'cor',
5419 'ky': 'kir',
5420 'la': 'lat',
5421 'lb': 'ltz',
5422 'lg': 'lug',
5423 'li': 'lim',
5424 'ln': 'lin',
5425 'lo': 'lao',
5426 'lt': 'lit',
5427 'lu': 'lub',
5428 'lv': 'lav',
5429 'mg': 'mlg',
5430 'mh': 'mah',
5431 'mi': 'mri',
5432 'mk': 'mkd',
5433 'ml': 'mal',
5434 'mn': 'mon',
5435 'mr': 'mar',
5436 'ms': 'msa',
5437 'mt': 'mlt',
5438 'my': 'mya',
5439 'na': 'nau',
5440 'nb': 'nob',
5441 'nd': 'nde',
5442 'ne': 'nep',
5443 'ng': 'ndo',
5444 'nl': 'nld',
5445 'nn': 'nno',
5446 'no': 'nor',
5447 'nr': 'nbl',
5448 'nv': 'nav',
5449 'ny': 'nya',
5450 'oc': 'oci',
5451 'oj': 'oji',
5452 'om': 'orm',
5453 'or': 'ori',
5454 'os': 'oss',
5455 'pa': 'pan',
5456 'pi': 'pli',
5457 'pl': 'pol',
5458 'ps': 'pus',
5459 'pt': 'por',
5460 'qu': 'que',
5461 'rm': 'roh',
5462 'rn': 'run',
5463 'ro': 'ron',
5464 'ru': 'rus',
5465 'rw': 'kin',
5466 'sa': 'san',
5467 'sc': 'srd',
5468 'sd': 'snd',
5469 'se': 'sme',
5470 'sg': 'sag',
5471 'si': 'sin',
5472 'sk': 'slk',
5473 'sl': 'slv',
5474 'sm': 'smo',
5475 'sn': 'sna',
5476 'so': 'som',
5477 'sq': 'sqi',
5478 'sr': 'srp',
5479 'ss': 'ssw',
5480 'st': 'sot',
5481 'su': 'sun',
5482 'sv': 'swe',
5483 'sw': 'swa',
5484 'ta': 'tam',
5485 'te': 'tel',
5486 'tg': 'tgk',
5487 'th': 'tha',
5488 'ti': 'tir',
5489 'tk': 'tuk',
5490 'tl': 'tgl',
5491 'tn': 'tsn',
5492 'to': 'ton',
5493 'tr': 'tur',
5494 'ts': 'tso',
5495 'tt': 'tat',
5496 'tw': 'twi',
5497 'ty': 'tah',
5498 'ug': 'uig',
5499 'uk': 'ukr',
5500 'ur': 'urd',
5501 'uz': 'uzb',
5502 've': 'ven',
5503 'vi': 'vie',
5504 'vo': 'vol',
5505 'wa': 'wln',
5506 'wo': 'wol',
5507 'xh': 'xho',
5508 'yi': 'yid',
e9a50fba 5509 'ji': 'yid', # Replaced by yi in 1989 revision
39672624
YCH
5510 'yo': 'yor',
5511 'za': 'zha',
5512 'zh': 'zho',
5513 'zu': 'zul',
5514 }
5515
5516 @classmethod
5517 def short2long(cls, code):
5518 """Convert language code from ISO 639-1 to ISO 639-2/T"""
5519 return cls._lang_map.get(code[:2])
5520
5521 @classmethod
5522 def long2short(cls, code):
5523 """Convert language code from ISO 639-2/T to ISO 639-1"""
5524 for short_name, long_name in cls._lang_map.items():
5525 if long_name == code:
5526 return short_name
5527
5528
4eb10f66
YCH
5529class ISO3166Utils(object):
5530 # From http://data.okfn.org/data/core/country-list
5531 _country_map = {
5532 'AF': 'Afghanistan',
5533 'AX': 'Åland Islands',
5534 'AL': 'Albania',
5535 'DZ': 'Algeria',
5536 'AS': 'American Samoa',
5537 'AD': 'Andorra',
5538 'AO': 'Angola',
5539 'AI': 'Anguilla',
5540 'AQ': 'Antarctica',
5541 'AG': 'Antigua and Barbuda',
5542 'AR': 'Argentina',
5543 'AM': 'Armenia',
5544 'AW': 'Aruba',
5545 'AU': 'Australia',
5546 'AT': 'Austria',
5547 'AZ': 'Azerbaijan',
5548 'BS': 'Bahamas',
5549 'BH': 'Bahrain',
5550 'BD': 'Bangladesh',
5551 'BB': 'Barbados',
5552 'BY': 'Belarus',
5553 'BE': 'Belgium',
5554 'BZ': 'Belize',
5555 'BJ': 'Benin',
5556 'BM': 'Bermuda',
5557 'BT': 'Bhutan',
5558 'BO': 'Bolivia, Plurinational State of',
5559 'BQ': 'Bonaire, Sint Eustatius and Saba',
5560 'BA': 'Bosnia and Herzegovina',
5561 'BW': 'Botswana',
5562 'BV': 'Bouvet Island',
5563 'BR': 'Brazil',
5564 'IO': 'British Indian Ocean Territory',
5565 'BN': 'Brunei Darussalam',
5566 'BG': 'Bulgaria',
5567 'BF': 'Burkina Faso',
5568 'BI': 'Burundi',
5569 'KH': 'Cambodia',
5570 'CM': 'Cameroon',
5571 'CA': 'Canada',
5572 'CV': 'Cape Verde',
5573 'KY': 'Cayman Islands',
5574 'CF': 'Central African Republic',
5575 'TD': 'Chad',
5576 'CL': 'Chile',
5577 'CN': 'China',
5578 'CX': 'Christmas Island',
5579 'CC': 'Cocos (Keeling) Islands',
5580 'CO': 'Colombia',
5581 'KM': 'Comoros',
5582 'CG': 'Congo',
5583 'CD': 'Congo, the Democratic Republic of the',
5584 'CK': 'Cook Islands',
5585 'CR': 'Costa Rica',
5586 'CI': 'Côte d\'Ivoire',
5587 'HR': 'Croatia',
5588 'CU': 'Cuba',
5589 'CW': 'Curaçao',
5590 'CY': 'Cyprus',
5591 'CZ': 'Czech Republic',
5592 'DK': 'Denmark',
5593 'DJ': 'Djibouti',
5594 'DM': 'Dominica',
5595 'DO': 'Dominican Republic',
5596 'EC': 'Ecuador',
5597 'EG': 'Egypt',
5598 'SV': 'El Salvador',
5599 'GQ': 'Equatorial Guinea',
5600 'ER': 'Eritrea',
5601 'EE': 'Estonia',
5602 'ET': 'Ethiopia',
5603 'FK': 'Falkland Islands (Malvinas)',
5604 'FO': 'Faroe Islands',
5605 'FJ': 'Fiji',
5606 'FI': 'Finland',
5607 'FR': 'France',
5608 'GF': 'French Guiana',
5609 'PF': 'French Polynesia',
5610 'TF': 'French Southern Territories',
5611 'GA': 'Gabon',
5612 'GM': 'Gambia',
5613 'GE': 'Georgia',
5614 'DE': 'Germany',
5615 'GH': 'Ghana',
5616 'GI': 'Gibraltar',
5617 'GR': 'Greece',
5618 'GL': 'Greenland',
5619 'GD': 'Grenada',
5620 'GP': 'Guadeloupe',
5621 'GU': 'Guam',
5622 'GT': 'Guatemala',
5623 'GG': 'Guernsey',
5624 'GN': 'Guinea',
5625 'GW': 'Guinea-Bissau',
5626 'GY': 'Guyana',
5627 'HT': 'Haiti',
5628 'HM': 'Heard Island and McDonald Islands',
5629 'VA': 'Holy See (Vatican City State)',
5630 'HN': 'Honduras',
5631 'HK': 'Hong Kong',
5632 'HU': 'Hungary',
5633 'IS': 'Iceland',
5634 'IN': 'India',
5635 'ID': 'Indonesia',
5636 'IR': 'Iran, Islamic Republic of',
5637 'IQ': 'Iraq',
5638 'IE': 'Ireland',
5639 'IM': 'Isle of Man',
5640 'IL': 'Israel',
5641 'IT': 'Italy',
5642 'JM': 'Jamaica',
5643 'JP': 'Japan',
5644 'JE': 'Jersey',
5645 'JO': 'Jordan',
5646 'KZ': 'Kazakhstan',
5647 'KE': 'Kenya',
5648 'KI': 'Kiribati',
5649 'KP': 'Korea, Democratic People\'s Republic of',
5650 'KR': 'Korea, Republic of',
5651 'KW': 'Kuwait',
5652 'KG': 'Kyrgyzstan',
5653 'LA': 'Lao People\'s Democratic Republic',
5654 'LV': 'Latvia',
5655 'LB': 'Lebanon',
5656 'LS': 'Lesotho',
5657 'LR': 'Liberia',
5658 'LY': 'Libya',
5659 'LI': 'Liechtenstein',
5660 'LT': 'Lithuania',
5661 'LU': 'Luxembourg',
5662 'MO': 'Macao',
5663 'MK': 'Macedonia, the Former Yugoslav Republic of',
5664 'MG': 'Madagascar',
5665 'MW': 'Malawi',
5666 'MY': 'Malaysia',
5667 'MV': 'Maldives',
5668 'ML': 'Mali',
5669 'MT': 'Malta',
5670 'MH': 'Marshall Islands',
5671 'MQ': 'Martinique',
5672 'MR': 'Mauritania',
5673 'MU': 'Mauritius',
5674 'YT': 'Mayotte',
5675 'MX': 'Mexico',
5676 'FM': 'Micronesia, Federated States of',
5677 'MD': 'Moldova, Republic of',
5678 'MC': 'Monaco',
5679 'MN': 'Mongolia',
5680 'ME': 'Montenegro',
5681 'MS': 'Montserrat',
5682 'MA': 'Morocco',
5683 'MZ': 'Mozambique',
5684 'MM': 'Myanmar',
5685 'NA': 'Namibia',
5686 'NR': 'Nauru',
5687 'NP': 'Nepal',
5688 'NL': 'Netherlands',
5689 'NC': 'New Caledonia',
5690 'NZ': 'New Zealand',
5691 'NI': 'Nicaragua',
5692 'NE': 'Niger',
5693 'NG': 'Nigeria',
5694 'NU': 'Niue',
5695 'NF': 'Norfolk Island',
5696 'MP': 'Northern Mariana Islands',
5697 'NO': 'Norway',
5698 'OM': 'Oman',
5699 'PK': 'Pakistan',
5700 'PW': 'Palau',
5701 'PS': 'Palestine, State of',
5702 'PA': 'Panama',
5703 'PG': 'Papua New Guinea',
5704 'PY': 'Paraguay',
5705 'PE': 'Peru',
5706 'PH': 'Philippines',
5707 'PN': 'Pitcairn',
5708 'PL': 'Poland',
5709 'PT': 'Portugal',
5710 'PR': 'Puerto Rico',
5711 'QA': 'Qatar',
5712 'RE': 'Réunion',
5713 'RO': 'Romania',
5714 'RU': 'Russian Federation',
5715 'RW': 'Rwanda',
5716 'BL': 'Saint Barthélemy',
5717 'SH': 'Saint Helena, Ascension and Tristan da Cunha',
5718 'KN': 'Saint Kitts and Nevis',
5719 'LC': 'Saint Lucia',
5720 'MF': 'Saint Martin (French part)',
5721 'PM': 'Saint Pierre and Miquelon',
5722 'VC': 'Saint Vincent and the Grenadines',
5723 'WS': 'Samoa',
5724 'SM': 'San Marino',
5725 'ST': 'Sao Tome and Principe',
5726 'SA': 'Saudi Arabia',
5727 'SN': 'Senegal',
5728 'RS': 'Serbia',
5729 'SC': 'Seychelles',
5730 'SL': 'Sierra Leone',
5731 'SG': 'Singapore',
5732 'SX': 'Sint Maarten (Dutch part)',
5733 'SK': 'Slovakia',
5734 'SI': 'Slovenia',
5735 'SB': 'Solomon Islands',
5736 'SO': 'Somalia',
5737 'ZA': 'South Africa',
5738 'GS': 'South Georgia and the South Sandwich Islands',
5739 'SS': 'South Sudan',
5740 'ES': 'Spain',
5741 'LK': 'Sri Lanka',
5742 'SD': 'Sudan',
5743 'SR': 'Suriname',
5744 'SJ': 'Svalbard and Jan Mayen',
5745 'SZ': 'Swaziland',
5746 'SE': 'Sweden',
5747 'CH': 'Switzerland',
5748 'SY': 'Syrian Arab Republic',
5749 'TW': 'Taiwan, Province of China',
5750 'TJ': 'Tajikistan',
5751 'TZ': 'Tanzania, United Republic of',
5752 'TH': 'Thailand',
5753 'TL': 'Timor-Leste',
5754 'TG': 'Togo',
5755 'TK': 'Tokelau',
5756 'TO': 'Tonga',
5757 'TT': 'Trinidad and Tobago',
5758 'TN': 'Tunisia',
5759 'TR': 'Turkey',
5760 'TM': 'Turkmenistan',
5761 'TC': 'Turks and Caicos Islands',
5762 'TV': 'Tuvalu',
5763 'UG': 'Uganda',
5764 'UA': 'Ukraine',
5765 'AE': 'United Arab Emirates',
5766 'GB': 'United Kingdom',
5767 'US': 'United States',
5768 'UM': 'United States Minor Outlying Islands',
5769 'UY': 'Uruguay',
5770 'UZ': 'Uzbekistan',
5771 'VU': 'Vanuatu',
5772 'VE': 'Venezuela, Bolivarian Republic of',
5773 'VN': 'Viet Nam',
5774 'VG': 'Virgin Islands, British',
5775 'VI': 'Virgin Islands, U.S.',
5776 'WF': 'Wallis and Futuna',
5777 'EH': 'Western Sahara',
5778 'YE': 'Yemen',
5779 'ZM': 'Zambia',
5780 'ZW': 'Zimbabwe',
5781 }
5782
5783 @classmethod
5784 def short2full(cls, code):
5785 """Convert an ISO 3166-2 country code to the corresponding full name"""
5786 return cls._country_map.get(code.upper())
5787
5788
773f291d
S
5789class GeoUtils(object):
5790 # Major IPv4 address blocks per country
5791 _country_ip_map = {
53896ca5 5792 'AD': '46.172.224.0/19',
773f291d
S
5793 'AE': '94.200.0.0/13',
5794 'AF': '149.54.0.0/17',
5795 'AG': '209.59.64.0/18',
5796 'AI': '204.14.248.0/21',
5797 'AL': '46.99.0.0/16',
5798 'AM': '46.70.0.0/15',
5799 'AO': '105.168.0.0/13',
53896ca5
S
5800 'AP': '182.50.184.0/21',
5801 'AQ': '23.154.160.0/24',
773f291d
S
5802 'AR': '181.0.0.0/12',
5803 'AS': '202.70.112.0/20',
53896ca5 5804 'AT': '77.116.0.0/14',
773f291d
S
5805 'AU': '1.128.0.0/11',
5806 'AW': '181.41.0.0/18',
53896ca5
S
5807 'AX': '185.217.4.0/22',
5808 'AZ': '5.197.0.0/16',
773f291d
S
5809 'BA': '31.176.128.0/17',
5810 'BB': '65.48.128.0/17',
5811 'BD': '114.130.0.0/16',
5812 'BE': '57.0.0.0/8',
53896ca5 5813 'BF': '102.178.0.0/15',
773f291d
S
5814 'BG': '95.42.0.0/15',
5815 'BH': '37.131.0.0/17',
5816 'BI': '154.117.192.0/18',
5817 'BJ': '137.255.0.0/16',
53896ca5 5818 'BL': '185.212.72.0/23',
773f291d
S
5819 'BM': '196.12.64.0/18',
5820 'BN': '156.31.0.0/16',
5821 'BO': '161.56.0.0/16',
5822 'BQ': '161.0.80.0/20',
53896ca5 5823 'BR': '191.128.0.0/12',
773f291d
S
5824 'BS': '24.51.64.0/18',
5825 'BT': '119.2.96.0/19',
5826 'BW': '168.167.0.0/16',
5827 'BY': '178.120.0.0/13',
5828 'BZ': '179.42.192.0/18',
5829 'CA': '99.224.0.0/11',
5830 'CD': '41.243.0.0/16',
53896ca5
S
5831 'CF': '197.242.176.0/21',
5832 'CG': '160.113.0.0/16',
773f291d 5833 'CH': '85.0.0.0/13',
53896ca5 5834 'CI': '102.136.0.0/14',
773f291d
S
5835 'CK': '202.65.32.0/19',
5836 'CL': '152.172.0.0/14',
53896ca5 5837 'CM': '102.244.0.0/14',
773f291d
S
5838 'CN': '36.128.0.0/10',
5839 'CO': '181.240.0.0/12',
5840 'CR': '201.192.0.0/12',
5841 'CU': '152.206.0.0/15',
5842 'CV': '165.90.96.0/19',
5843 'CW': '190.88.128.0/17',
53896ca5 5844 'CY': '31.153.0.0/16',
773f291d
S
5845 'CZ': '88.100.0.0/14',
5846 'DE': '53.0.0.0/8',
5847 'DJ': '197.241.0.0/17',
5848 'DK': '87.48.0.0/12',
5849 'DM': '192.243.48.0/20',
5850 'DO': '152.166.0.0/15',
5851 'DZ': '41.96.0.0/12',
5852 'EC': '186.68.0.0/15',
5853 'EE': '90.190.0.0/15',
5854 'EG': '156.160.0.0/11',
5855 'ER': '196.200.96.0/20',
5856 'ES': '88.0.0.0/11',
5857 'ET': '196.188.0.0/14',
5858 'EU': '2.16.0.0/13',
5859 'FI': '91.152.0.0/13',
5860 'FJ': '144.120.0.0/16',
53896ca5 5861 'FK': '80.73.208.0/21',
773f291d
S
5862 'FM': '119.252.112.0/20',
5863 'FO': '88.85.32.0/19',
5864 'FR': '90.0.0.0/9',
5865 'GA': '41.158.0.0/15',
5866 'GB': '25.0.0.0/8',
5867 'GD': '74.122.88.0/21',
5868 'GE': '31.146.0.0/16',
5869 'GF': '161.22.64.0/18',
5870 'GG': '62.68.160.0/19',
53896ca5
S
5871 'GH': '154.160.0.0/12',
5872 'GI': '95.164.0.0/16',
773f291d
S
5873 'GL': '88.83.0.0/19',
5874 'GM': '160.182.0.0/15',
5875 'GN': '197.149.192.0/18',
5876 'GP': '104.250.0.0/19',
5877 'GQ': '105.235.224.0/20',
5878 'GR': '94.64.0.0/13',
5879 'GT': '168.234.0.0/16',
5880 'GU': '168.123.0.0/16',
5881 'GW': '197.214.80.0/20',
5882 'GY': '181.41.64.0/18',
5883 'HK': '113.252.0.0/14',
5884 'HN': '181.210.0.0/16',
5885 'HR': '93.136.0.0/13',
5886 'HT': '148.102.128.0/17',
5887 'HU': '84.0.0.0/14',
5888 'ID': '39.192.0.0/10',
5889 'IE': '87.32.0.0/12',
5890 'IL': '79.176.0.0/13',
5891 'IM': '5.62.80.0/20',
5892 'IN': '117.192.0.0/10',
5893 'IO': '203.83.48.0/21',
5894 'IQ': '37.236.0.0/14',
5895 'IR': '2.176.0.0/12',
5896 'IS': '82.221.0.0/16',
5897 'IT': '79.0.0.0/10',
5898 'JE': '87.244.64.0/18',
5899 'JM': '72.27.0.0/17',
5900 'JO': '176.29.0.0/16',
53896ca5 5901 'JP': '133.0.0.0/8',
773f291d
S
5902 'KE': '105.48.0.0/12',
5903 'KG': '158.181.128.0/17',
5904 'KH': '36.37.128.0/17',
5905 'KI': '103.25.140.0/22',
5906 'KM': '197.255.224.0/20',
53896ca5 5907 'KN': '198.167.192.0/19',
773f291d
S
5908 'KP': '175.45.176.0/22',
5909 'KR': '175.192.0.0/10',
5910 'KW': '37.36.0.0/14',
5911 'KY': '64.96.0.0/15',
5912 'KZ': '2.72.0.0/13',
5913 'LA': '115.84.64.0/18',
5914 'LB': '178.135.0.0/16',
53896ca5 5915 'LC': '24.92.144.0/20',
773f291d
S
5916 'LI': '82.117.0.0/19',
5917 'LK': '112.134.0.0/15',
53896ca5 5918 'LR': '102.183.0.0/16',
773f291d
S
5919 'LS': '129.232.0.0/17',
5920 'LT': '78.56.0.0/13',
5921 'LU': '188.42.0.0/16',
5922 'LV': '46.109.0.0/16',
5923 'LY': '41.252.0.0/14',
5924 'MA': '105.128.0.0/11',
5925 'MC': '88.209.64.0/18',
5926 'MD': '37.246.0.0/16',
5927 'ME': '178.175.0.0/17',
5928 'MF': '74.112.232.0/21',
5929 'MG': '154.126.0.0/17',
5930 'MH': '117.103.88.0/21',
5931 'MK': '77.28.0.0/15',
5932 'ML': '154.118.128.0/18',
5933 'MM': '37.111.0.0/17',
5934 'MN': '49.0.128.0/17',
5935 'MO': '60.246.0.0/16',
5936 'MP': '202.88.64.0/20',
5937 'MQ': '109.203.224.0/19',
5938 'MR': '41.188.64.0/18',
5939 'MS': '208.90.112.0/22',
5940 'MT': '46.11.0.0/16',
5941 'MU': '105.16.0.0/12',
5942 'MV': '27.114.128.0/18',
53896ca5 5943 'MW': '102.70.0.0/15',
773f291d
S
5944 'MX': '187.192.0.0/11',
5945 'MY': '175.136.0.0/13',
5946 'MZ': '197.218.0.0/15',
5947 'NA': '41.182.0.0/16',
5948 'NC': '101.101.0.0/18',
5949 'NE': '197.214.0.0/18',
5950 'NF': '203.17.240.0/22',
5951 'NG': '105.112.0.0/12',
5952 'NI': '186.76.0.0/15',
5953 'NL': '145.96.0.0/11',
5954 'NO': '84.208.0.0/13',
5955 'NP': '36.252.0.0/15',
5956 'NR': '203.98.224.0/19',
5957 'NU': '49.156.48.0/22',
5958 'NZ': '49.224.0.0/14',
5959 'OM': '5.36.0.0/15',
5960 'PA': '186.72.0.0/15',
5961 'PE': '186.160.0.0/14',
5962 'PF': '123.50.64.0/18',
5963 'PG': '124.240.192.0/19',
5964 'PH': '49.144.0.0/13',
5965 'PK': '39.32.0.0/11',
5966 'PL': '83.0.0.0/11',
5967 'PM': '70.36.0.0/20',
5968 'PR': '66.50.0.0/16',
5969 'PS': '188.161.0.0/16',
5970 'PT': '85.240.0.0/13',
5971 'PW': '202.124.224.0/20',
5972 'PY': '181.120.0.0/14',
5973 'QA': '37.210.0.0/15',
53896ca5 5974 'RE': '102.35.0.0/16',
773f291d 5975 'RO': '79.112.0.0/13',
53896ca5 5976 'RS': '93.86.0.0/15',
773f291d 5977 'RU': '5.136.0.0/13',
53896ca5 5978 'RW': '41.186.0.0/16',
773f291d
S
5979 'SA': '188.48.0.0/13',
5980 'SB': '202.1.160.0/19',
5981 'SC': '154.192.0.0/11',
53896ca5 5982 'SD': '102.120.0.0/13',
773f291d 5983 'SE': '78.64.0.0/12',
53896ca5 5984 'SG': '8.128.0.0/10',
773f291d
S
5985 'SI': '188.196.0.0/14',
5986 'SK': '78.98.0.0/15',
53896ca5 5987 'SL': '102.143.0.0/17',
773f291d
S
5988 'SM': '89.186.32.0/19',
5989 'SN': '41.82.0.0/15',
53896ca5 5990 'SO': '154.115.192.0/18',
773f291d
S
5991 'SR': '186.179.128.0/17',
5992 'SS': '105.235.208.0/21',
5993 'ST': '197.159.160.0/19',
5994 'SV': '168.243.0.0/16',
5995 'SX': '190.102.0.0/20',
5996 'SY': '5.0.0.0/16',
5997 'SZ': '41.84.224.0/19',
5998 'TC': '65.255.48.0/20',
5999 'TD': '154.68.128.0/19',
6000 'TG': '196.168.0.0/14',
6001 'TH': '171.96.0.0/13',
6002 'TJ': '85.9.128.0/18',
6003 'TK': '27.96.24.0/21',
6004 'TL': '180.189.160.0/20',
6005 'TM': '95.85.96.0/19',
6006 'TN': '197.0.0.0/11',
6007 'TO': '175.176.144.0/21',
6008 'TR': '78.160.0.0/11',
6009 'TT': '186.44.0.0/15',
6010 'TV': '202.2.96.0/19',
6011 'TW': '120.96.0.0/11',
6012 'TZ': '156.156.0.0/14',
53896ca5
S
6013 'UA': '37.52.0.0/14',
6014 'UG': '102.80.0.0/13',
6015 'US': '6.0.0.0/8',
773f291d 6016 'UY': '167.56.0.0/13',
53896ca5 6017 'UZ': '84.54.64.0/18',
773f291d 6018 'VA': '212.77.0.0/19',
53896ca5 6019 'VC': '207.191.240.0/21',
773f291d 6020 'VE': '186.88.0.0/13',
53896ca5 6021 'VG': '66.81.192.0/20',
773f291d
S
6022 'VI': '146.226.0.0/16',
6023 'VN': '14.160.0.0/11',
6024 'VU': '202.80.32.0/20',
6025 'WF': '117.20.32.0/21',
6026 'WS': '202.4.32.0/19',
6027 'YE': '134.35.0.0/16',
6028 'YT': '41.242.116.0/22',
6029 'ZA': '41.0.0.0/11',
53896ca5
S
6030 'ZM': '102.144.0.0/13',
6031 'ZW': '102.177.192.0/18',
773f291d
S
6032 }
6033
6034 @classmethod
5f95927a
S
6035 def random_ipv4(cls, code_or_block):
6036 if len(code_or_block) == 2:
6037 block = cls._country_ip_map.get(code_or_block.upper())
6038 if not block:
6039 return None
6040 else:
6041 block = code_or_block
773f291d
S
6042 addr, preflen = block.split('/')
6043 addr_min = compat_struct_unpack('!L', socket.inet_aton(addr))[0]
6044 addr_max = addr_min | (0xffffffff >> int(preflen))
18a0defa 6045 return compat_str(socket.inet_ntoa(
4248dad9 6046 compat_struct_pack('!L', random.randint(addr_min, addr_max))))
773f291d
S
6047
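# Usage sketch (illustrative, not part of the original code). random_ipv4()
# accepts either a two-letter country code or an explicit CIDR block and
# returns a random address inside it:
#   GeoUtils.random_ipv4('DE')           # some address inside 53.0.0.0/8
#   GeoUtils.random_ipv4('10.0.0.0/24')  # some address inside the given block
#   GeoUtils.random_ipv4('ZZ') is None   # unknown country codes yield None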
6048
91410c9b 6049class PerRequestProxyHandler(compat_urllib_request.ProxyHandler):
2461f79d
PH
6050 def __init__(self, proxies=None):
6051 # Set default handlers
6052 for type in ('http', 'https'):
6053 setattr(self, '%s_open' % type,
6054 lambda r, proxy='__noproxy__', type=type, meth=self.proxy_open:
6055 meth(r, proxy, type))
38e87f6c 6056 compat_urllib_request.ProxyHandler.__init__(self, proxies)
2461f79d 6057
91410c9b 6058 def proxy_open(self, req, proxy, type):
2461f79d 6059 req_proxy = req.headers.get('Ytdl-request-proxy')
91410c9b
PH
6060 if req_proxy is not None:
6061 proxy = req_proxy
2461f79d
PH
6062 del req.headers['Ytdl-request-proxy']
6063
6064 if proxy == '__noproxy__':
6065 return None # No Proxy
51fb4995 6066 if compat_urlparse.urlparse(proxy).scheme.lower() in ('socks', 'socks4', 'socks4a', 'socks5'):
71aff188 6067 req.add_header('Ytdl-socks-proxy', proxy)
7a5c1cfe 6068 # yt-dlp's http/https handlers do the actual wrapping of the socket with SOCKS
71aff188 6069 return None
91410c9b
PH
6070 return compat_urllib_request.ProxyHandler.proxy_open(
6071 self, req, proxy, type)
5bc880b9
YCH
6072
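# Usage sketch (illustrative, not part of the original code; the proxy URL is
# made up). The handler honours a per-request 'Ytdl-request-proxy' header,
# which overrides the proxies the opener was built with ('__noproxy__'
# disables proxying for that request):
#   opener = compat_urllib_request.build_opener(
#       PerRequestProxyHandler({'http': 'http://proxy.example:3128'}))
#   req = compat_urllib_request.Request('http://example.com/')
#   req.add_header('Ytdl-request-proxy', '__noproxy__')
#   opener.open(req)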
6073
0a5445dd
YCH
6074# Both long_to_bytes and bytes_to_long are adapted from PyCrypto, which is
6075# released into Public Domain
6076# https://github.com/dlitz/pycrypto/blob/master/lib/Crypto/Util/number.py#L387
6077
6078def long_to_bytes(n, blocksize=0):
6079 """long_to_bytes(n:long, blocksize:int) : string
6080 Convert a long integer to a byte string.
6081
6082 If optional blocksize is given and greater than zero, pad the front of the
6083 byte string with binary zeros so that the length is a multiple of
6084 blocksize.
6085 """
6086 # after much testing, this algorithm was deemed to be the fastest
6087 s = b''
6088 n = int(n)
6089 while n > 0:
6090 s = compat_struct_pack('>I', n & 0xffffffff) + s
6091 n = n >> 32
6092 # strip off leading zeros
6093 for i in range(len(s)):
6094 if s[i] != b'\000'[0]:
6095 break
6096 else:
6097 # only happens when n == 0
6098 s = b'\000'
6099 i = 0
6100 s = s[i:]
6101 # add back some pad bytes. this could be done more efficiently w.r.t. the
6102 # de-padding being done above, but sigh...
6103 if blocksize > 0 and len(s) % blocksize:
6104 s = (blocksize - len(s) % blocksize) * b'\000' + s
6105 return s
6106
6107
6108def bytes_to_long(s):
6109 """bytes_to_long(string) : long
6110 Convert a byte string to a long integer.
6111
6112 This is (essentially) the inverse of long_to_bytes().
6113 """
6114 acc = 0
6115 length = len(s)
6116 if length % 4:
6117 extra = (4 - length % 4)
6118 s = b'\000' * extra + s
6119 length = length + extra
6120 for i in range(0, length, 4):
6121 acc = (acc << 32) + compat_struct_unpack('>I', s[i:i + 4])[0]
6122 return acc
6123
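# Worked examples (illustrative, not part of the original code). The two
# helpers are inverses for non-negative integers:
#   long_to_bytes(256)               == b'\x01\x00'
#   bytes_to_long(b'\x01\x00')       == 256
#   long_to_bytes(256, blocksize=4)  == b'\x00\x00\x01\x00'  # front-padded to a multiple of 4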
6124
5bc880b9
YCH
6125def ohdave_rsa_encrypt(data, exponent, modulus):
6126 '''
6127 Implement OHDave's RSA algorithm. See http://www.ohdave.com/rsa/
6128
6129 Input:
6130 data: data to encrypt, bytes-like object
6131 exponent, modulus: parameters e and N of the RSA algorithm, both integers
6132 Output: hex string of encrypted data
6133
6134 Limitation: supports one block encryption only
6135 '''
6136
6137 payload = int(binascii.hexlify(data[::-1]), 16)
6138 encrypted = pow(payload, exponent, modulus)
6139 return '%x' % encrypted
81bdc8fd
YCH
6140
6141
f48409c7
YCH
6142def pkcs1pad(data, length):
6143 """
6144 Padding input data with PKCS#1 scheme
6145
6146 @param {int[]} data input data
6147 @param {int} length target length
6148 @returns {int[]} padded data
6149 """
6150 if len(data) > length - 11:
6151 raise ValueError('Input data too long for PKCS#1 padding')
6152
6153 pseudo_random = [random.randint(0, 254) for _ in range(length - len(data) - 3)]
6154 return [0, 2] + pseudo_random + [0] + data
6155
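# Usage sketch (illustrative, not part of the original code). The result always
# has the requested length and follows the [0, 2] + <random filler> + [0] + data
# layout, so for example:
#   padded = pkcs1pad([1, 2, 3], 16)
#   len(padded) == 16 and padded[:2] == [0, 2] and padded[-4:] == [0, 1, 2, 3]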
6156
5eb6bdce 6157def encode_base_n(num, n, table=None):
59f898b7 6158 FULL_TABLE = '0123456789abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ'
59f898b7
YCH
6159 if not table:
6160 table = FULL_TABLE[:n]
6161
5eb6bdce
YCH
6162 if n > len(table):
6163 raise ValueError('base %d exceeds table length %d' % (n, len(table)))
6164
6165 if num == 0:
6166 return table[0]
6167
81bdc8fd
YCH
6168 ret = ''
6169 while num:
6170 ret = table[num % n] + ret
6171 num = num // n
6172 return ret
f52354a8
YCH
6173
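# Worked examples (illustrative, not part of the original code):
#   encode_base_n(255, 16) == 'ff'
#   encode_base_n(255, 2)  == '11111111'
#   encode_base_n(0, 36)   == '0'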
6174
6175def decode_packed_codes(code):
06b3fe29 6176 mobj = re.search(PACKED_CODES_RE, code)
a0566bbf 6177 obfuscated_code, base, count, symbols = mobj.groups()
f52354a8
YCH
6178 base = int(base)
6179 count = int(count)
6180 symbols = symbols.split('|')
6181 symbol_table = {}
6182
6183 while count:
6184 count -= 1
5eb6bdce 6185 base_n_count = encode_base_n(count, base)
f52354a8
YCH
6186 symbol_table[base_n_count] = symbols[count] or base_n_count
6187
6188 return re.sub(
6189 r'\b(\w+)\b', lambda mobj: symbol_table[mobj.group(0)],
a0566bbf 6190 obfuscated_code)
e154c651 6191
6192
1ced2221
S
6193def caesar(s, alphabet, shift):
6194 if shift == 0:
6195 return s
6196 l = len(alphabet)
6197 return ''.join(
6198 alphabet[(alphabet.index(c) + shift) % l] if c in alphabet else c
6199 for c in s)
6200
6201
6202def rot47(s):
6203 return caesar(s, r'''!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~''', 47)
6204
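# Usage sketch (illustrative, not part of the original code). caesar() only
# shifts characters present in the given alphabet, and rot47() is its own
# inverse over printable ASCII:
#   caesar('abc xyz', 'abcdefghijklmnopqrstuvwxyz', 1) == 'bcd yza'
#   rot47(rot47('some obfuscated text')) == 'some obfuscated text'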
6205
e154c651 6206def parse_m3u8_attributes(attrib):
6207 info = {}
6208 for (key, val) in re.findall(r'(?P<key>[A-Z0-9-]+)=(?P<val>"[^"]+"|[^",]+)(?:,|$)', attrib):
6209 if val.startswith('"'):
6210 val = val[1:-1]
6211 info[key] = val
6212 return info
1143535d
YCH
6213
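# Usage sketch (illustrative, not part of the original code). Quoted values may
# contain commas and are returned without the surrounding quotes:
#   parse_m3u8_attributes('BANDWIDTH=1280000,CODECS="avc1.4d401e,mp4a.40.2"')
#   == {'BANDWIDTH': '1280000', 'CODECS': 'avc1.4d401e,mp4a.40.2'}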
6214
6215def urshift(val, n):
6216 return val >> n if val >= 0 else (val + 0x100000000) >> n
d3f8e038
YCH
6217
6218
6219# Based on png2str() written by @gdkchan and improved by @yokrysty
067aa17e 6220# Originally posted at https://github.com/ytdl-org/youtube-dl/issues/9706
d3f8e038
YCH
6221def decode_png(png_data):
6222 # Reference: https://www.w3.org/TR/PNG/
6223 header = png_data[8:]
6224
6225 if png_data[:8] != b'\x89PNG\x0d\x0a\x1a\x0a' or header[4:8] != b'IHDR':
6226 raise IOError('Not a valid PNG file.')
6227
6228 int_map = {1: '>B', 2: '>H', 4: '>I'}
6229 unpack_integer = lambda x: compat_struct_unpack(int_map[len(x)], x)[0]
6230
6231 chunks = []
6232
6233 while header:
6234 length = unpack_integer(header[:4])
6235 header = header[4:]
6236
6237 chunk_type = header[:4]
6238 header = header[4:]
6239
6240 chunk_data = header[:length]
6241 header = header[length:]
6242
6243 header = header[4:] # Skip CRC
6244
6245 chunks.append({
6246 'type': chunk_type,
6247 'length': length,
6248 'data': chunk_data
6249 })
6250
6251 ihdr = chunks[0]['data']
6252
6253 width = unpack_integer(ihdr[:4])
6254 height = unpack_integer(ihdr[4:8])
6255
6256 idat = b''
6257
6258 for chunk in chunks:
6259 if chunk['type'] == b'IDAT':
6260 idat += chunk['data']
6261
6262 if not idat:
6263 raise IOError('Unable to read PNG data.')
6264
6265 decompressed_data = bytearray(zlib.decompress(idat))
6266
6267 stride = width * 3
6268 pixels = []
6269
6270 def _get_pixel(idx):
6271 x = idx % stride
6272 y = idx // stride
6273 return pixels[y][x]
6274
6275 for y in range(height):
6276 basePos = y * (1 + stride)
6277 filter_type = decompressed_data[basePos]
6278
6279 current_row = []
6280
6281 pixels.append(current_row)
6282
6283 for x in range(stride):
6284 color = decompressed_data[1 + basePos + x]
6285 basex = y * stride + x
6286 left = 0
6287 up = 0
6288
6289 if x > 2:
6290 left = _get_pixel(basex - 3)
6291 if y > 0:
6292 up = _get_pixel(basex - stride)
6293
6294 if filter_type == 1: # Sub
6295 color = (color + left) & 0xff
6296 elif filter_type == 2: # Up
6297 color = (color + up) & 0xff
6298 elif filter_type == 3: # Average
6299 color = (color + ((left + up) >> 1)) & 0xff
6300 elif filter_type == 4: # Paeth
6301 a = left
6302 b = up
6303 c = 0
6304
6305 if x > 2 and y > 0:
6306 c = _get_pixel(basex - stride - 3)
6307
6308 p = a + b - c
6309
6310 pa = abs(p - a)
6311 pb = abs(p - b)
6312 pc = abs(p - c)
6313
6314 if pa <= pb and pa <= pc:
6315 color = (color + a) & 0xff
6316 elif pb <= pc:
6317 color = (color + b) & 0xff
6318 else:
6319 color = (color + c) & 0xff
6320
6321 current_row.append(color)
6322
6323 return width, height, pixels
efa97bdc
YCH
6324
6325
6326def write_xattr(path, key, value):
6327 # This mess below finds the best xattr tool for the job
6328 try:
6329 # try the pyxattr module...
6330 import xattr
6331
53a7e3d2
YCH
6332 if hasattr(xattr, 'set'): # pyxattr
6333 # Unicode arguments are not supported in python-pyxattr until
6334 # version 0.5.0
067aa17e 6335 # See https://github.com/ytdl-org/youtube-dl/issues/5498
53a7e3d2
YCH
6336 pyxattr_required_version = '0.5.0'
6337 if version_tuple(xattr.__version__) < version_tuple(pyxattr_required_version):
6338 # TODO: fallback to CLI tools
6339 raise XAttrUnavailableError(
6340 'python-pyxattr is detected but is too old. '
7a5c1cfe 6341 'yt-dlp requires %s or above while your version is %s. '
53a7e3d2
YCH
6342 'Falling back to other xattr implementations' % (
6343 pyxattr_required_version, xattr.__version__))
6344
6345 setxattr = xattr.set
6346 else: # xattr
6347 setxattr = xattr.setxattr
efa97bdc
YCH
6348
6349 try:
53a7e3d2 6350 setxattr(path, key, value)
efa97bdc
YCH
6351 except EnvironmentError as e:
6352 raise XAttrMetadataError(e.errno, e.strerror)
6353
6354 except ImportError:
6355 if compat_os_name == 'nt':
6356 # Write xattrs to NTFS Alternate Data Streams:
6357 # http://en.wikipedia.org/wiki/NTFS#Alternate_data_streams_.28ADS.29
6358 assert ':' not in key
6359 assert os.path.exists(path)
6360
6361 ads_fn = path + ':' + key
6362 try:
6363 with open(ads_fn, 'wb') as f:
6364 f.write(value)
6365 except EnvironmentError as e:
6366 raise XAttrMetadataError(e.errno, e.strerror)
6367 else:
6368 user_has_setfattr = check_executable('setfattr', ['--version'])
6369 user_has_xattr = check_executable('xattr', ['-h'])
6370
6371 if user_has_setfattr or user_has_xattr:
6372
6373 value = value.decode('utf-8')
6374 if user_has_setfattr:
6375 executable = 'setfattr'
6376 opts = ['-n', key, '-v', value]
6377 elif user_has_xattr:
6378 executable = 'xattr'
6379 opts = ['-w', key, value]
6380
3089bc74
S
6381 cmd = ([encodeFilename(executable, True)]
6382 + [encodeArgument(o) for o in opts]
6383 + [encodeFilename(path, True)])
efa97bdc
YCH
6384
6385 try:
d3c93ec2 6386 p = Popen(
efa97bdc
YCH
6387 cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE, stdin=subprocess.PIPE)
6388 except EnvironmentError as e:
6389 raise XAttrMetadataError(e.errno, e.strerror)
d3c93ec2 6390 stdout, stderr = p.communicate_or_kill()
efa97bdc
YCH
6391 stderr = stderr.decode('utf-8', 'replace')
6392 if p.returncode != 0:
6393 raise XAttrMetadataError(p.returncode, stderr)
6394
6395 else:
6396 # On Unix, but we can't find pyxattr, setfattr, or xattr.
6397 if sys.platform.startswith('linux'):
6398 raise XAttrUnavailableError(
6399 "Couldn't find a tool to set the xattrs. "
6400 "Install either the python 'pyxattr' or 'xattr' "
6401 "modules, or the GNU 'attr' package "
6402 "(which contains the 'setfattr' tool).")
6403 else:
6404 raise XAttrUnavailableError(
6405 "Couldn't find a tool to set the xattrs. "
6406 "Install either the python 'xattr' module, "
6407 "or the 'xattr' binary.")
0c265486
YCH
6408
6409
6410def random_birthday(year_field, month_field, day_field):
aa374bc7
AS
6411 start_date = datetime.date(1950, 1, 1)
6412 end_date = datetime.date(1995, 12, 31)
6413 offset = random.randint(0, (end_date - start_date).days)
6414 random_date = start_date + datetime.timedelta(offset)
0c265486 6415 return {
aa374bc7
AS
6416 year_field: str(random_date.year),
6417 month_field: str(random_date.month),
6418 day_field: str(random_date.day),
0c265486 6419 }
732044af 6420
c76eb41b 6421
732044af 6422# Templates for internet shortcut files, which are plain text files.
6423DOT_URL_LINK_TEMPLATE = '''
6424[InternetShortcut]
6425URL=%(url)s
6426'''.lstrip()
6427
6428DOT_WEBLOC_LINK_TEMPLATE = '''
6429<?xml version="1.0" encoding="UTF-8"?>
6430<!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
6431<plist version="1.0">
6432<dict>
6433\t<key>URL</key>
6434\t<string>%(url)s</string>
6435</dict>
6436</plist>
6437'''.lstrip()
6438
6439DOT_DESKTOP_LINK_TEMPLATE = '''
6440[Desktop Entry]
6441Encoding=UTF-8
6442Name=%(filename)s
6443Type=Link
6444URL=%(url)s
6445Icon=text-html
6446'''.lstrip()
6447
08438d2c 6448LINK_TEMPLATES = {
6449 'url': DOT_URL_LINK_TEMPLATE,
6450 'desktop': DOT_DESKTOP_LINK_TEMPLATE,
6451 'webloc': DOT_WEBLOC_LINK_TEMPLATE,
6452}
6453
732044af 6454
6455def iri_to_uri(iri):
6456 """
6457 Converts an IRI (Internationalized Resource Identifier, allowing Unicode characters) to a URI (Uniform Resource Identifier, ASCII-only).
6458
6459 The function doesn't add an additional layer of escaping; e.g., it doesn't escape `%3C` as `%253C`. Instead, it percent-escapes characters with an underlying UTF-8 encoding *besides* those already escaped, leaving the URI intact.
6460 """
6461
6462 iri_parts = compat_urllib_parse_urlparse(iri)
6463
6464 if '[' in iri_parts.netloc:
6465 raise ValueError('IPv6 URIs are not yet supported.')
6466 # Querying `.netloc`, when there's only one bracket, also raises a ValueError.
6467
6468 # The `safe` argument values, that the following code uses, contain the characters that should not be percent-encoded. Everything else but letters, digits and '_.-' will be percent-encoded with an underlying UTF-8 encoding. Everything already percent-encoded will be left as is.
6469
6470 net_location = ''
6471 if iri_parts.username:
6472 net_location += compat_urllib_parse_quote(iri_parts.username, safe=r"!$%&'()*+,~")
6473 if iri_parts.password is not None:
6474 net_location += ':' + compat_urllib_parse_quote(iri_parts.password, safe=r"!$%&'()*+,~")
6475 net_location += '@'
6476
6477 net_location += iri_parts.hostname.encode('idna').decode('utf-8') # Punycode for Unicode hostnames.
6478 # The 'idna' encoding produces ASCII text.
6479 if iri_parts.port is not None and iri_parts.port != 80:
6480 net_location += ':' + str(iri_parts.port)
6481
6482 return compat_urllib_parse_urlunparse(
6483 (iri_parts.scheme,
6484 net_location,
6485
6486 compat_urllib_parse_quote_plus(iri_parts.path, safe=r"!$%&'()*+,/:;=@|~"),
6487
6488 # Unsure about the `safe` argument, since this is a legacy way of handling parameters.
6489 compat_urllib_parse_quote_plus(iri_parts.params, safe=r"!$%&'()*+,/:;=@|~"),
6490
6491 # Not totally sure about the `safe` argument, since the source does not explicitly mention the query URI component.
6492 compat_urllib_parse_quote_plus(iri_parts.query, safe=r"!$%&'()*+,/:;=?@{|}~"),
6493
6494 compat_urllib_parse_quote_plus(iri_parts.fragment, safe=r"!#$%&'()*+,/:;=?@{|}~")))
6495
6496 # Source for `safe` arguments: https://url.spec.whatwg.org/#percent-encoded-bytes.
6497
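# Usage sketch (illustrative, not part of the original code). Non-ASCII path
# characters are percent-encoded as UTF-8 while already-escaped sequences are
# left untouched:
#   iri_to_uri('https://example.com/a%20b/ñ') == 'https://example.com/a%20b/%C3%B1'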
6498
6499def to_high_limit_path(path):
6500 if sys.platform in ['win32', 'cygwin']:
6501 # Work around MAX_PATH limitation on Windows. The maximum allowed length for the individual path segments may still be quite limited.
6502 return r'\\?\ '.rstrip() + os.path.abspath(path)
6503
6504 return path
76d321f6 6505
c76eb41b 6506
b868936c 6507def format_field(obj, field=None, template='%s', ignore=(None, ''), default='', func=None):
6508 if field is None:
6509 val = obj if obj is not None else default
6510 else:
6511 val = obj.get(field, default)
76d321f6 6512 if func and val not in ignore:
6513 val = func(val)
6514 return template % val if val not in ignore else default
00dd0cd5 6515
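# Usage sketch (illustrative, not part of the original code):
#   format_field({'height': 1080}, 'height', '%dp')  == '1080p'
#   format_field({}, 'height', '%dp')                == ''          # ignored values fall back to `default`
#   format_field(5, None, 'count: %d')               == 'count: 5'  # no field: the object itself is formatted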
6516
6517def clean_podcast_url(url):
6518 return re.sub(r'''(?x)
6519 (?:
6520 (?:
6521 chtbl\.com/track|
6522 media\.blubrry\.com| # https://create.blubrry.com/resources/podcast-media-download-statistics/getting-started/
6523 play\.podtrac\.com
6524 )/[^/]+|
6525 (?:dts|www)\.podtrac\.com/(?:pts/)?redirect\.[0-9a-z]{3,4}| # http://analytics.podtrac.com/how-to-measure
6526 flex\.acast\.com|
6527 pd(?:
6528 cn\.co| # https://podcorn.com/analytics-prefix/
6529 st\.fm # https://podsights.com/docs/
6530 )/e
6531 )/''', '', url)
ffcb8191
THD
6532
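# Usage sketch (illustrative, not part of the original code; the URL is made
# up). Tracking prefixes are stripped, leaving the direct media URL:
#   clean_podcast_url('https://chtbl.com/track/ABC123/media.example.com/ep1.mp3')
#   == 'https://media.example.com/ep1.mp3'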
6533
6534_HEX_TABLE = '0123456789abcdef'
6535
6536
6537def random_uuidv4():
6538 return re.sub(r'[xy]', lambda x: _HEX_TABLE[random.randint(0, 15)], 'xxxxxxxx-xxxx-4xxx-yxxx-xxxxxxxxxxxx')
0202b52a 6539
6540
6541def make_dir(path, to_screen=None):
6542 try:
6543 dn = os.path.dirname(path)
6544 if dn and not os.path.exists(dn):
6545 os.makedirs(dn)
6546 return True
6547 except (OSError, IOError) as err:
6548 if callable(to_screen):
6549 to_screen('unable to create directory ' + error_to_compat_str(err))
6550 return False
f74980cb 6551
6552
6553def get_executable_path():
c552ae88 6554 from zipimport import zipimporter
6555 if hasattr(sys, 'frozen'): # Running from PyInstaller
6556 path = os.path.dirname(sys.executable)
6557 elif isinstance(globals().get('__loader__'), zipimporter): # Running from ZIP
6558 path = os.path.join(os.path.dirname(__file__), '../..')
6559 else:
6560 path = os.path.join(os.path.dirname(__file__), '..')
f74980cb 6561 return os.path.abspath(path)
6562
6563
2f567473 6564def load_plugins(name, suffix, namespace):
3ae5e797 6565 classes = {}
f74980cb 6566 try:
019a94f7
ÁS
6567 plugins_spec = importlib.util.spec_from_file_location(
6568 name, os.path.join(get_executable_path(), 'ytdlp_plugins', name, '__init__.py'))
6569 plugins = importlib.util.module_from_spec(plugins_spec)
6570 sys.modules[plugins_spec.name] = plugins
6571 plugins_spec.loader.exec_module(plugins)
f74980cb 6572 for name in dir(plugins):
2f567473 6573 if name in namespace:
6574 continue
6575 if not name.endswith(suffix):
f74980cb 6576 continue
6577 klass = getattr(plugins, name)
3ae5e797 6578 classes[name] = namespace[name] = klass
019a94f7 6579 except FileNotFoundError:
f74980cb 6580 pass
f74980cb 6581 return classes
06167fbb 6582
6583
325ebc17 6584def traverse_obj(
352d63fd 6585 obj, *path_list, default=None, expected_type=None, get_all=True,
325ebc17 6586 casesense=True, is_user_input=False, traverse_string=False):
324ad820 6587 ''' Traverse nested list/dict/tuple
8f334380 6588 @param path_list A list of paths which are checked one by one.
6589 Each path is a list of keys where each key is a string,
1797b073 6590 a function, a tuple of strings/None or "...".
2614f646 6591 When a fuction is given, it takes the key as argument and
6592 returns whether the key matches or not. When a tuple is given,
8f334380 6593 all the keys given in the tuple are traversed, and
6594 "..." traverses all the keys in the object
1797b073 6595 "None" returns the object without traversal
325ebc17 6596 @param default Default value to return
352d63fd 6597 @param expected_type Only accept final value of this type (Can also be any callable)
6598 @param get_all Return all the values obtained from a path or only the first one
324ad820 6599 @param casesense Whether to consider dictionary keys as case sensitive
6600 @param is_user_input Whether the keys are generated from user input. If True,
6601 strings are converted to int/slice if necessary
6602 @param traverse_string Whether to traverse inside strings. If True, any
6603 non-compatible object will also be converted into a string
8f334380 6604 # TODO: Write tests
324ad820 6605 '''
325ebc17 6606 if not casesense:
dbf5416a 6607 _lower = lambda k: (k.lower() if isinstance(k, str) else k)
8f334380 6608 path_list = (map(_lower, variadic(path)) for path in path_list)
6609
6610 def _traverse_obj(obj, path, _current_depth=0):
6611 nonlocal depth
6612 path = tuple(variadic(path))
6613 for i, key in enumerate(path):
1797b073 6614 if None in (key, obj):
6615 return obj
8f334380 6616 if isinstance(key, (list, tuple)):
6617 obj = [_traverse_obj(obj, sub_key, _current_depth) for sub_key in key]
6618 key = ...
6619 if key is ...:
6620 obj = (obj.values() if isinstance(obj, dict)
6621 else obj if isinstance(obj, (list, tuple, LazyList))
6622 else str(obj) if traverse_string else [])
6623 _current_depth += 1
6624 depth = max(depth, _current_depth)
6625 return [_traverse_obj(inner_obj, path[i + 1:], _current_depth) for inner_obj in obj]
2614f646 6626 elif callable(key):
6627 if isinstance(obj, (list, tuple, LazyList)):
6628 obj = enumerate(obj)
6629 elif isinstance(obj, dict):
6630 obj = obj.items()
6631 else:
6632 if not traverse_string:
6633 return None
6634 obj = str(obj)
6635 _current_depth += 1
6636 depth = max(depth, _current_depth)
6637 return [_traverse_obj(v, path[i + 1:], _current_depth) for k, v in obj if key(k)]
575e17a1 6638 elif isinstance(obj, dict) and not (is_user_input and key == ':'):
325ebc17 6639 obj = (obj.get(key) if casesense or (key in obj)
6640 else next((v for k, v in obj.items() if _lower(k) == key), None))
6641 else:
6642 if is_user_input:
6643 key = (int_or_none(key) if ':' not in key
6644 else slice(*map(int_or_none, key.split(':'))))
8f334380 6645 if key == slice(None):
575e17a1 6646 return _traverse_obj(obj, (..., *path[i + 1:]), _current_depth)
325ebc17 6647 if not isinstance(key, (int, slice)):
9fea350f 6648 return None
8f334380 6649 if not isinstance(obj, (list, tuple, LazyList)):
325ebc17 6650 if not traverse_string:
6651 return None
6652 obj = str(obj)
6653 try:
6654 obj = obj[key]
6655 except IndexError:
324ad820 6656 return None
325ebc17 6657 return obj
6658
352d63fd 6659 if isinstance(expected_type, type):
6660 type_test = lambda val: val if isinstance(val, expected_type) else None
6661 elif expected_type is not None:
6662 type_test = expected_type
6663 else:
6664 type_test = lambda val: val
6665
8f334380 6666 for path in path_list:
6667 depth = 0
6668 val = _traverse_obj(obj, path)
325ebc17 6669 if val is not None:
8f334380 6670 if depth:
6671 for _ in range(depth - 1):
6586bca9 6672 val = itertools.chain.from_iterable(v for v in val if v is not None)
352d63fd 6673 val = [v for v in map(type_test, val) if v is not None]
8f334380 6674 if val:
352d63fd 6675 return val if get_all else val[0]
6676 else:
6677 val = type_test(val)
6678 if val is not None:
8f334380 6679 return val
325ebc17 6680 return default
324ad820 6681
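# Usage sketch (illustrative, not part of the original code):
#   d = {'formats': [{'url': 'http://a'}, {'height': 720, 'url': 'http://b'}]}
#   traverse_obj(d, ('formats', 0, 'url'))                 == 'http://a'
#   traverse_obj(d, ('formats', ..., 'height'))            == [720]    # branching drops missing keys
#   traverse_obj(d, ('formats', 1, 'fps'), default='n/a')  == 'n/a'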
6682
6683def traverse_dict(dictn, keys, casesense=True):
ee8dd27a 6684 write_string('DeprecationWarning: yt_dlp.utils.traverse_dict is deprecated '
6685 'and may be removed in a future version. Use yt_dlp.utils.traverse_obj instead')
6686 return traverse_obj(dictn, keys, casesense=casesense, is_user_input=True, traverse_string=True)
6606817a 6687
6688
4b4b7f74 6689def variadic(x, allowed_types=(str, bytes, dict)):
cb89cfc1 6690 return x if isinstance(x, collections.abc.Iterable) and not isinstance(x, allowed_types) else (x,)
bd50a52b
THD
6691
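# Usage sketch (illustrative, not part of the original code). Scalars and
# "atomic" iterables (str, bytes, dict) are wrapped in a tuple; other iterables
# pass through unchanged:
#   variadic('abc')      == ('abc',)
#   variadic(['a', 'b']) == ['a', 'b']
#   variadic(None)       == (None,)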
6692
49fa4d9a
N
6693# create a JSON Web Signature (jws) with HS256 algorithm
6694# the resulting format is in JWS Compact Serialization
6695# implemented following JWT https://www.rfc-editor.org/rfc/rfc7519.html
6696# implemented following JWS https://www.rfc-editor.org/rfc/rfc7515.html
6697def jwt_encode_hs256(payload_data, key, headers={}):
6698 header_data = {
6699 'alg': 'HS256',
6700 'typ': 'JWT',
6701 }
6702 if headers:
6703 header_data.update(headers)
6704 header_b64 = base64.b64encode(json.dumps(header_data).encode('utf-8'))
6705 payload_b64 = base64.b64encode(json.dumps(payload_data).encode('utf-8'))
6706 h = hmac.new(key.encode('utf-8'), header_b64 + b'.' + payload_b64, hashlib.sha256)
6707 signature_b64 = base64.b64encode(h.digest())
6708 token = header_b64 + b'.' + payload_b64 + b'.' + signature_b64
6709 return token
819e0531 6710
6711
16b0d7e6 6712# can be extended in future to verify the signature and parse header and return the algorithm used if it's not HS256
6713def jwt_decode_hs256(jwt):
6714 header_b64, payload_b64, signature_b64 = jwt.split('.')
6715 payload_data = json.loads(base64.urlsafe_b64decode(payload_b64))
6716 return payload_data
6717
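# Round-trip sketch (illustrative, not part of the original code). Note that
# jwt_decode_hs256() does not verify the signature:
#   token = jwt_encode_hs256({'uid': 123}, 'secret')   # bytes: b'<header>.<payload>.<signature>'
#   jwt_decode_hs256(token.decode('utf-8')) == {'uid': 123}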
6718
819e0531 6719def supports_terminal_sequences(stream):
6720 if compat_os_name == 'nt':
e3c7d495 6721 from .compat import WINDOWS_VT_MODE # Must be imported locally
6722 if not WINDOWS_VT_MODE or get_windows_version() < (10, 0, 10586):
819e0531 6723 return False
6724 elif not os.getenv('TERM'):
6725 return False
6726 try:
6727 return stream.isatty()
6728 except BaseException:
6729 return False
6730
6731
ec11a9f4 6732_terminal_sequences_re = re.compile('\033\\[[^m]+m')
6733
6734
6735def remove_terminal_sequences(string):
6736 return _terminal_sequences_re.sub('', string)
6737
6738
6739def number_of_digits(number):
6740 return len('%d' % number)
34921b43 6741
6742
6743def join_nonempty(*values, delim='-', from_dict=None):
6744 if from_dict is not None:
c586f9e8 6745 values = map(from_dict.get, values)
34921b43 6746 return delim.join(map(str, filter(None, values)))
06e57990 6747
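# Usage sketch (illustrative, not part of the original code). Falsy values are
# dropped before joining; from_dict looks the values up first:
#   join_nonempty('1080p', None, 'dash')                       == '1080p-dash'
#   join_nonempty(1920, 1080, delim='x')                       == '1920x1080'
#   join_nonempty('height', 'fps', from_dict={'height': 720})  == '720'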
6748
6749class Config:
6750 own_args = None
6751 filename = None
6752 __initialized = False
6753
6754 def __init__(self, parser, label=None):
6755 self._parser, self.label = parser, label
6756 self._loaded_paths, self.configs = set(), []
6757
6758 def init(self, args=None, filename=None):
6759 assert not self.__initialized
6760 if filename:
6761 location = os.path.realpath(filename)
6762 if location in self._loaded_paths:
6763 return False
6764 self._loaded_paths.add(location)
6765
6766 self.__initialized = True
6767 self.own_args, self.filename = args, filename
6768 for location in self._parser.parse_args(args)[0].config_locations or []:
6769 location = compat_expanduser(location)
6770 if os.path.isdir(location):
6771 location = os.path.join(location, 'yt-dlp.conf')
6772 if not os.path.exists(location):
6773 self._parser.error(f'config location {location} does not exist')
6774 self.append_config(self.read_file(location), location)
6775 return True
6776
6777 def __str__(self):
6778 label = join_nonempty(
6779 self.label, 'config', f'"{self.filename}"' if self.filename else '',
6780 delim=' ')
6781 return join_nonempty(
6782 self.own_args is not None and f'{label[0].upper()}{label[1:]}: {self.hide_login_info(self.own_args)}',
6783 *(f'\n{c}'.replace('\n', '\n| ')[1:] for c in self.configs),
6784 delim='\n')
6785
6786 @staticmethod
6787 def read_file(filename, default=[]):
6788 try:
6789 optionf = open(filename)
6790 except IOError:
6791 return default # silently skip if file is not present
6792 try:
6793 # FIXME: https://github.com/ytdl-org/youtube-dl/commit/dfe5fa49aed02cf36ba9f743b11b0903554b5e56
6794 contents = optionf.read()
6795 if sys.version_info < (3,):
6796 contents = contents.decode(preferredencoding())
6797 res = compat_shlex_split(contents, comments=True)
6798 finally:
6799 optionf.close()
6800 return res
6801
6802 @staticmethod
6803 def hide_login_info(opts):
6804 PRIVATE_OPTS = {'-p', '--password', '-u', '--username', '--video-password', '--ap-password', '--ap-username'}
6805 eqre = re.compile('^(?P<key>' + ('|'.join(re.escape(po) for po in PRIVATE_OPTS)) + ')=.+$')
6806
6807 def _scrub_eq(o):
6808 m = eqre.match(o)
6809 if m:
6810 return m.group('key') + '=PRIVATE'
6811 else:
6812 return o
6813
6814 opts = list(map(_scrub_eq, opts))
6815 for idx, opt in enumerate(opts):
6816 if opt in PRIVATE_OPTS and idx + 1 < len(opts):
6817 opts[idx + 1] = 'PRIVATE'
6818 return opts
6819
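    # Usage sketch (illustrative, not part of the original code):
    #   Config.hide_login_info(['-u', 'me', '--video-password', 'hunter2', '--username=me'])
    #   == ['-u', 'PRIVATE', '--video-password', 'PRIVATE', '--username=PRIVATE']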
6820 def append_config(self, *args, label=None):
6821 config = type(self)(self._parser, label)
6822 config._loaded_paths = self._loaded_paths
6823 if config.init(*args):
6824 self.configs.append(config)
6825
6826 @property
6827 def all_args(self):
6828 for config in reversed(self.configs):
6829 yield from config.all_args
6830 yield from self.own_args or []
6831
6832 def parse_args(self):
6833 return self._parser.parse_args(list(self.all_args))