* `comment_sort`: `top` or `new` (default) - choose comment sorting mode (on YouTube's side)
* `max_comments`: Limit the amount of comments to gather. Comma-separated list of integers representing `max-comments,max-parents,max-replies,max-replies-per-thread`. Default is `all,all,all,all`
* E.g. `all,all,1000,10` will get a maximum of 1000 replies total, with up to 10 replies per thread. `1000,all,100` will get a maximum of 1000 comments, with a maximum of 100 replies total
+* `include_duplicate_formats`: Extract formats with identical content but different URLs or protocols. This is useful if some of the formats are unavailable or throttled.
* `include_incomplete_formats`: Extract formats that cannot be downloaded completely (live dash and post-live m3u8)
* `innertube_host`: Innertube API host to use for all API requests; e.g. `studio.youtube.com`, `youtubei.googleapis.com`. Note that cookies exported from one subdomain will not work on others
* `innertube_key`: Innertube API key to use for all API requests
'small', 'medium', 'large', 'hd720', 'hd1080', 'hd1440', 'hd2160', 'hd2880', 'highres'
])
streaming_formats = traverse_obj(streaming_data, (..., ('formats', 'adaptiveFormats'), ...))
+ all_formats = self._configuration_arg('include_duplicate_formats')
for fmt in streaming_formats:
if fmt.get('targetDurationSec'):
itag = str_or_none(fmt.get('itag'))
audio_track = fmt.get('audioTrack') or {}
stream_id = (itag, audio_track.get('id'), fmt.get('isDrc'))
- if stream_id in stream_ids:
- continue
+ if not all_formats:
+ if stream_id in stream_ids:
+ continue
quality = fmt.get('quality')
height = int_or_none(fmt.get('height'))
try_get(fmt, lambda x: x['projectionType'].replace('RECTANGULAR', '').lower()),
try_get(fmt, lambda x: x['spatialAudioType'].replace('SPATIAL_AUDIO_TYPE_', '').lower()),
throttled and 'THROTTLED', is_damaged and 'DAMAGED',
- self.get_param('verbose') and client_name,
+ (self.get_param('verbose') or all_formats) and client_name,
delim=', '),
# Format 22 is likely to be damaged. See https://github.com/yt-dlp/yt-dlp/issues/3372
'source_preference': -10 if throttled else -5 if itag == '22' else -1,
if mime_mobj:
dct['ext'] = mimetype2ext(mime_mobj.group(1))
dct.update(parse_codecs(mime_mobj.group(2)))
-
+ if itag:
+ itags[itag].add(('https', dct.get('language')))
+ stream_ids.append(stream_id)
single_stream = 'none' in (dct.get('acodec'), dct.get('vcodec'))
if single_stream and dct.get('ext'):
dct['container'] = dct['ext'] + '_dash'
- if single_stream or itag == '17':
- CHUNK_SIZE = 10 << 20
- dct.update({
+
+ CHUNK_SIZE = 10 << 20
+ if dct['filesize']:
+ yield {
+ **dct,
+ 'format_id': f'{dct["format_id"]}-dashy' if all_formats else dct['format_id'],
'protocol': 'http_dash_segments',
'fragments': [{
'url': update_url_query(dct['url'], {
'range': f'{range_start}-{min(range_start + CHUNK_SIZE - 1, dct["filesize"])}'
})
} for range_start in range(0, dct['filesize'], CHUNK_SIZE)]
- } if itag != '17' and dct['filesize'] else {
- 'downloader_options': {'http_chunk_size': CHUNK_SIZE}
- })
-
- if itag:
- itags[itag].add(('https', dct.get('language')))
- stream_ids.append(stream_id)
+ }
+ if not all_formats:
+ continue
+ dct['downloader_options'] = {'http_chunk_size': CHUNK_SIZE}
yield dct
needs_live_processing = self._needs_live_processing(live_status, duration)
def process_manifest_format(f, proto, client_name, itag):
key = (proto, f.get('language'))
- if key in itags[itag]:
+ if not all_formats and key in itags[itag]:
return False
itags[itag].add(key)
- if any(p != proto for p, _ in itags[itag]):
+ if itag and all_formats:
+ f['format_id'] = f'{itag}-{proto}'
+ elif any(p != proto for p, _ in itags[itag]):
f['format_id'] = f'{itag}-{proto}'
elif itag:
f['format_id'] = itag