]>
Commit | Line | Data |
---|---|---|
1 | import enum | |
2 | import json | |
3 | import os | |
4 | import re | |
5 | import subprocess | |
6 | import sys | |
7 | import tempfile | |
8 | import time | |
9 | import uuid | |
10 | ||
11 | from .fragment import FragmentFD | |
12 | from ..compat import functools | |
13 | from ..networking import Request | |
14 | from ..postprocessor.ffmpeg import EXT_TO_OUT_FORMATS, FFmpegPostProcessor | |
15 | from ..utils import ( | |
16 | Popen, | |
17 | RetryManager, | |
18 | _configuration_args, | |
19 | check_executable, | |
20 | classproperty, | |
21 | cli_bool_option, | |
22 | cli_option, | |
23 | cli_valueless_option, | |
24 | determine_ext, | |
25 | encodeArgument, | |
26 | encodeFilename, | |
27 | find_available_port, | |
28 | remove_end, | |
29 | traverse_obj, | |
30 | ) | |
31 | ||
32 | ||
class Features(enum.Enum):
    """Optional capabilities a downloader may declare in SUPPORTED_FEATURES.

    Checked by ExternalFD.supports() against the requested download.
    """
    TO_STDOUT = enum.auto()  # can write the download to stdout ('-')
    MULTIPLE_FORMATS = enum.auto()  # can handle '+'-joined (merged) protocols
36 | ||
37 | ||
class ExternalFD(FragmentFD):
    """Base class for downloaders that delegate to an external program.

    Subclasses either implement ``_make_cmd()`` (to build the command line)
    or override ``_call_downloader()`` entirely.
    """
    # Protocols (info_dict['protocol']) this downloader can handle
    SUPPORTED_PROTOCOLS = ('http', 'https', 'ftp', 'ftps')
    # Optional capabilities (see the Features enum)
    SUPPORTED_FEATURES = ()
    # Whether the subprocess' stderr is captured (and echoed on failure)
    _CAPTURE_STDERR = True

    def real_download(self, filename, info_dict):
        """Run the external downloader; return True on success, False otherwise."""
        self.report_destination(filename)
        tmpfilename = self.temp_name(filename)
        # Reset so the finally-block below only removes a cookies file
        # written during *this* download (see _write_cookies)
        self._cookies_tempfile = None

        try:
            started = time.time()
            retval = self._call_downloader(tmpfilename, info_dict)
        except KeyboardInterrupt:
            if not info_dict.get('is_live'):
                raise
            # Live stream downloading cancellation should be considered as
            # correct and expected termination thus all postprocessing
            # should take place
            retval = 0
            self.to_screen('[%s] Interrupted by user' % self.get_basename())
        finally:
            if self._cookies_tempfile:
                self.try_remove(self._cookies_tempfile)

        if retval == 0:
            status = {
                'filename': filename,
                'status': 'finished',
                'elapsed': time.time() - started,
            }
            if filename != '-':
                # Only real files (not stdout) have a measurable final size
                fsize = os.path.getsize(encodeFilename(tmpfilename))
                self.try_rename(tmpfilename, filename)
                status.update({
                    'downloaded_bytes': fsize,
                    'total_bytes': fsize,
                })
            self._hook_progress(status, info_dict)
            return True
        else:
            self.to_stderr('\n')
            self.report_error('%s exited with code %d' % (
                self.get_basename(), retval))
            return False

    @classmethod
    def get_basename(cls):
        # "CurlFD" -> "curl": strip the "FD" suffix and lowercase
        return cls.__name__[:-2].lower()

    @classproperty
    def EXE_NAME(cls):
        # Executable name; overridden where it differs from the class name
        return cls.get_basename()

    @functools.cached_property
    def exe(self):
        # Executable actually invoked; available() may replace this class
        # attribute with a user-supplied path
        return self.EXE_NAME

    @classmethod
    def available(cls, path=None):
        """Check the executable exists; remember its resolved path on success."""
        path = check_executable(
            cls.EXE_NAME if path in (None, cls.get_basename()) else path,
            [cls.AVAILABLE_OPT])
        if not path:
            return False
        cls.exe = path
        return path

    @classmethod
    def supports(cls, info_dict):
        """Whether this downloader can handle the given info_dict."""
        return all((
            not info_dict.get('to_stdout') or Features.TO_STDOUT in cls.SUPPORTED_FEATURES,
            '+' not in info_dict['protocol'] or Features.MULTIPLE_FORMATS in cls.SUPPORTED_FEATURES,
            not traverse_obj(info_dict, ('hls_aes', ...), 'extra_param_to_segment_url'),
            all(proto in cls.SUPPORTED_PROTOCOLS for proto in info_dict['protocol'].split('+')),
        ))

    @classmethod
    def can_download(cls, info_dict, path=None):
        """available() and supports() combined."""
        return cls.available(path) and cls.supports(info_dict)

    # Thin wrappers around the cli_* helpers, bound to self.params
    def _option(self, command_option, param):
        return cli_option(self.params, command_option, param)

    def _bool_option(self, command_option, param, true_value='true', false_value='false', separator=None):
        return cli_bool_option(self.params, command_option, param, true_value, false_value, separator)

    def _valueless_option(self, command_option, param, expected_value=True):
        return cli_valueless_option(self.params, command_option, param, expected_value)

    def _configuration_args(self, keys=None, *args, **kwargs):
        # User-supplied extra arguments (--downloader-args)
        return _configuration_args(
            self.get_basename(), self.params.get('external_downloader_args'), self.EXE_NAME,
            keys, *args, **kwargs)

    def _write_cookies(self):
        """Save the cookiejar to a file the external program can read.

        Returns the path written to (a temporary file when the jar has no
        filename of its own; that file is removed in real_download).
        """
        if not self.ydl.cookiejar.filename:
            tmp_cookies = tempfile.NamedTemporaryFile(suffix='.cookies', delete=False)
            tmp_cookies.close()
            self._cookies_tempfile = tmp_cookies.name
            self.to_screen(f'[download] Writing temporary cookies file to "{self._cookies_tempfile}"')
        # real_download resets _cookies_tempfile; if it's None then save() will write to cookiejar.filename
        self.ydl.cookiejar.save(self._cookies_tempfile)
        return self.ydl.cookiejar.filename or self._cookies_tempfile

    def _call_downloader(self, tmpfilename, info_dict):
        """ Either overwrite this or implement _make_cmd """
        cmd = [encodeArgument(a) for a in self._make_cmd(tmpfilename, info_dict)]

        self._debug_cmd(cmd)

        if 'fragments' not in info_dict:
            # Plain (non-fragmented) download: one invocation, pass through
            # the program's exit code
            _, stderr, returncode = self._call_process(cmd, info_dict)
            if returncode and stderr:
                self.to_stderr(stderr)
            return returncode

        skip_unavailable_fragments = self.params.get('skip_unavailable_fragments', True)

        # Retry the whole invocation until it succeeds or retries are exhausted
        retry_manager = RetryManager(self.params.get('fragment_retries'), self.report_retry,
                                     frag_index=None, fatal=not skip_unavailable_fragments)
        for retry in retry_manager:
            _, stderr, returncode = self._call_process(cmd, info_dict)
            if not returncode:
                break
            # TODO: Decide whether to retry based on error code
            # https://aria2.github.io/manual/en/html/aria2c.html#exit-status
            if stderr:
                self.to_stderr(stderr)
            retry.error = Exception()
            continue
        if not skip_unavailable_fragments and retry_manager.error:
            return -1

        # Concatenate (and, if needed, decrypt) the fragment files into the
        # final temporary file
        decrypt_fragment = self.decrypter(info_dict)
        dest, _ = self.sanitize_open(tmpfilename, 'wb')
        for frag_index, fragment in enumerate(info_dict['fragments']):
            fragment_filename = '%s-Frag%d' % (tmpfilename, frag_index)
            try:
                src, _ = self.sanitize_open(fragment_filename, 'rb')
            except OSError as err:
                if skip_unavailable_fragments and frag_index > 1:
                    self.report_skip_fragment(frag_index, err)
                    continue
                self.report_error(f'Unable to open fragment {frag_index}; {err}')
                return -1
            dest.write(decrypt_fragment(fragment, src.read()))
            src.close()
            if not self.params.get('keep_fragments', False):
                self.try_remove(encodeFilename(fragment_filename))
        dest.close()
        # Remove the aria2c-style URL list file, if one was written
        self.try_remove(encodeFilename('%s.frag.urls' % tmpfilename))
        return 0

    def _call_process(self, cmd, info_dict):
        """Run cmd; return (stdout, stderr, returncode)."""
        return Popen.run(cmd, text=True, stderr=subprocess.PIPE if self._CAPTURE_STDERR else None)
194 | ||
195 | ||
class CurlFD(ExternalFD):
    """Download via curl(1)."""

    AVAILABLE_OPT = '-V'
    _CAPTURE_STDERR = False  # curl writes the progress to stderr

    def _make_cmd(self, tmpfilename, info_dict):
        """Assemble the curl command line for the given download."""
        # Follow redirects, write to the temp file, accept compressed encodings
        args = [self.exe, '--location', '-o', tmpfilename, '--compressed']

        cookie_header = self.ydl.cookiejar.get_cookie_header(info_dict['url'])
        if cookie_header:
            args.extend(['--cookie', cookie_header])

        headers = info_dict.get('http_headers')
        if headers is not None:
            for name, value in headers.items():
                args.extend(['--header', f'{name}: {value}'])

        args.extend(self._bool_option('--continue-at', 'continuedl', '-', '0'))
        args.extend(self._valueless_option('--silent', 'noprogress'))
        args.extend(self._valueless_option('--verbose', 'verbose'))
        args.extend(self._option('--limit-rate', 'ratelimit'))
        retry_args = self._option('--retry', 'retries')
        if len(retry_args) == 2:
            if retry_args[1] in ('inf', 'infinite'):
                # curl has no "infinite" keyword; use the 32-bit maximum
                retry_args[1] = '2147483647'
            args.extend(retry_args)
        args.extend(self._option('--max-filesize', 'max_filesize'))
        args.extend(self._option('--interface', 'source_address'))
        args.extend(self._option('--proxy', 'proxy'))
        args.extend(self._valueless_option('--insecure', 'nocheckcertificate'))
        args.extend(self._configuration_args())
        # "--" ends option parsing so the URL cannot be mistaken for an option
        args.extend(['--', info_dict['url']])
        return args
225 | ||
226 | ||
class AxelFD(ExternalFD):
    """Download via axel(1)."""

    AVAILABLE_OPT = '-V'

    def _make_cmd(self, tmpfilename, info_dict):
        """Assemble the axel command line for the given download."""
        args = [self.exe, '-o', tmpfilename]

        headers = info_dict.get('http_headers')
        if headers is not None:
            for name, value in headers.items():
                args.extend(['-H', f'{name}: {value}'])

        cookie_header = self.ydl.cookiejar.get_cookie_header(info_dict['url'])
        if cookie_header:
            # When a Cookie header is attached, redirects are disabled
            args.extend(['-H', f'Cookie: {cookie_header}', '--max-redirect=0'])

        args.extend(self._configuration_args())
        args.extend(['--', info_dict['url']])
        return args
241 | ||
242 | ||
class WgetFD(ExternalFD):
    """Download via wget(1)."""

    AVAILABLE_OPT = '--version'

    def _make_cmd(self, tmpfilename, info_dict):
        """Assemble the wget command line for the given download."""
        args = [self.exe, '-O', tmpfilename, '-nv', '--compression=auto']

        if self.ydl.cookiejar.get_cookie_header(info_dict['url']):
            # wget reads cookies from a file rather than a header option
            args.extend(['--load-cookies', self._write_cookies()])

        headers = info_dict.get('http_headers')
        if headers is not None:
            for name, value in headers.items():
                args.extend(['--header', f'{name}: {value}'])

        args.extend(self._option('--limit-rate', 'ratelimit'))
        retry_args = self._option('--tries', 'retries')
        if len(retry_args) == 2:
            if retry_args[1] in ('inf', 'infinite'):
                # wget expresses "retry forever" as 0 tries
                retry_args[1] = '0'
            args.extend(retry_args)
        args.extend(self._option('--bind-address', 'source_address'))

        proxy = self.params.get('proxy')
        if proxy:
            # wget takes proxies via wgetrc commands, not a CLI option
            for env_var in ('http_proxy', 'https_proxy'):
                args.extend(['--execute', f'{env_var}={proxy}'])

        args.extend(self._valueless_option('--no-check-certificate', 'nocheckcertificate'))
        args.extend(self._configuration_args())
        args.extend(['--', info_dict['url']])
        return args
268 | ||
269 | ||
class Aria2cFD(ExternalFD):
    """Download via aria2c, optionally tracking progress over its JSON-RPC.

    Bug fix vs. the previous revision: in _call_process, `total` may be reset
    to None (when the reported total is smaller than the downloaded amount),
    after which the 'eta' computation `(total - downloaded) / ...` raised a
    TypeError. The eta is now None in that case.
    """
    AVAILABLE_OPT = '-v'
    SUPPORTED_PROTOCOLS = ('http', 'https', 'ftp', 'ftps', 'dash_frag_urls', 'm3u8_frag_urls')

    @staticmethod
    def supports_manifest(manifest):
        """Whether aria2c can handle every feature used by this HLS manifest."""
        UNSUPPORTED_FEATURES = [
            r'#EXT-X-BYTERANGE',  # playlists composed of byte ranges of media files [1]
            # 1. https://tools.ietf.org/html/draft-pantos-http-live-streaming-17#section-4.3.2.2
        ]
        check_results = (not re.search(feature, manifest) for feature in UNSUPPORTED_FEATURES)
        return all(check_results)

    @staticmethod
    def _aria2c_filename(fn):
        # Prefix relative paths with "./" — see the workaround note in _make_cmd
        return fn if os.path.isabs(fn) else f'.{os.path.sep}{fn}'

    def _call_downloader(self, tmpfilename, info_dict):
        """Optionally enable RPC-based progress, then run the usual downloader."""
        # FIXME: Disabled due to https://github.com/yt-dlp/yt-dlp/issues/5931
        if False and 'no-external-downloader-progress' not in self.params.get('compat_opts', []):
            info_dict['__rpc'] = {
                'port': find_available_port() or 19190,
                'secret': str(uuid.uuid4()),
            }
        return super()._call_downloader(tmpfilename, info_dict)

    def _make_cmd(self, tmpfilename, info_dict):
        """Assemble the aria2c command line for the given download."""
        cmd = [self.exe, '-c', '--no-conf',
               '--console-log-level=warn', '--summary-interval=0', '--download-result=hide',
               '--http-accept-gzip=true', '--file-allocation=none', '-x16', '-j16', '-s16']
        if 'fragments' in info_dict:
            cmd += ['--allow-overwrite=true', '--allow-piece-length-change=true']
        else:
            cmd += ['--min-split-size', '1M']

        if self.ydl.cookiejar.get_cookie_header(info_dict['url']):
            cmd += [f'--load-cookies={self._write_cookies()}']
        if info_dict.get('http_headers') is not None:
            for key, val in info_dict['http_headers'].items():
                cmd += ['--header', f'{key}: {val}']
        cmd += self._option('--max-overall-download-limit', 'ratelimit')
        cmd += self._option('--interface', 'source_address')
        cmd += self._option('--all-proxy', 'proxy')
        cmd += self._bool_option('--check-certificate', 'nocheckcertificate', 'false', 'true', '=')
        cmd += self._bool_option('--remote-time', 'updatetime', 'true', 'false', '=')
        cmd += self._bool_option('--show-console-readout', 'noprogress', 'false', 'true', '=')
        cmd += self._configuration_args()

        if '__rpc' in info_dict:
            cmd += [
                '--enable-rpc',
                f'--rpc-listen-port={info_dict["__rpc"]["port"]}',
                f'--rpc-secret={info_dict["__rpc"]["secret"]}']

        # aria2c strips out spaces from the beginning/end of filenames and paths.
        # We work around this issue by adding a "./" to the beginning of the
        # filename and relative path, and adding a "/" at the end of the path.
        # See: https://github.com/yt-dlp/yt-dlp/issues/276
        # https://github.com/ytdl-org/youtube-dl/issues/20312
        # https://github.com/aria2/aria2/issues/1373
        dn = os.path.dirname(tmpfilename)
        if dn:
            cmd += ['--dir', self._aria2c_filename(dn) + os.path.sep]
        if 'fragments' not in info_dict:
            cmd += ['--out', self._aria2c_filename(os.path.basename(tmpfilename))]
        cmd += ['--auto-file-renaming=false']

        if 'fragments' in info_dict:
            # Write all fragment URLs (with their output names) to a list file
            # that aria2c downloads in order
            cmd += ['--uri-selector=inorder']
            url_list_file = '%s.frag.urls' % tmpfilename
            url_list = []
            for frag_index, fragment in enumerate(info_dict['fragments']):
                fragment_filename = '%s-Frag%d' % (os.path.basename(tmpfilename), frag_index)
                url_list.append('%s\n\tout=%s' % (fragment['url'], self._aria2c_filename(fragment_filename)))
            stream, _ = self.sanitize_open(url_list_file, 'wb')
            stream.write('\n'.join(url_list).encode())
            stream.close()
            cmd += ['-i', self._aria2c_filename(url_list_file)]
        else:
            cmd += ['--', info_dict['url']]
        return cmd

    def aria2c_rpc(self, rpc_port, rpc_secret, method, params=()):
        """Call an aria2c JSON-RPC method on localhost and return its result."""
        # Does not actually need to be UUID, just unique
        sanitycheck = str(uuid.uuid4())
        d = json.dumps({
            'jsonrpc': '2.0',
            'id': sanitycheck,
            'method': method,
            'params': [f'token:{rpc_secret}', *params],
        }).encode('utf-8')
        request = Request(
            f'http://localhost:{rpc_port}/jsonrpc',
            data=d, headers={
                'Content-Type': 'application/json',
                'Content-Length': f'{len(d)}',
            }, proxies={'all': None})
        with self.ydl.urlopen(request) as r:
            resp = json.load(r)
        assert resp.get('id') == sanitycheck, 'Something went wrong with RPC server'
        return resp['result']

    def _call_process(self, cmd, info_dict):
        """Run aria2c; when RPC is enabled, poll it to report progress."""
        if '__rpc' not in info_dict:
            return super()._call_process(cmd, info_dict)

        send_rpc = functools.partial(self.aria2c_rpc, info_dict['__rpc']['port'], info_dict['__rpc']['secret'])
        started = time.time()

        fragmented = 'fragments' in info_dict
        frag_count = len(info_dict['fragments']) if fragmented else 1
        status = {
            'filename': info_dict.get('_filename'),
            'status': 'downloading',
            'elapsed': 0,
            'downloaded_bytes': 0,
            'fragment_count': frag_count if fragmented else None,
            'fragment_index': 0 if fragmented else None,
        }
        self._hook_progress(status, info_dict)

        def get_stat(key, *obj, average=False):
            # Sum (or average) a numeric field over the RPC result lists,
            # ignoring missing/zero entries
            val = tuple(filter(None, map(float, traverse_obj(obj, (..., ..., key))))) or [0]
            return sum(val) / (len(val) if average else 1)

        with Popen(cmd, text=True, stdout=subprocess.DEVNULL, stderr=subprocess.PIPE) as p:
            # Add a small sleep so that RPC client can receive response,
            # or the connection stalls infinitely
            time.sleep(0.2)
            retval = p.poll()
            while retval is None:
                # We don't use tellStatus as we won't know the GID without reading stdout
                # Ref: https://aria2.github.io/manual/en/html/aria2c.html#aria2.tellActive
                active = send_rpc('aria2.tellActive')
                completed = send_rpc('aria2.tellStopped', [0, frag_count])

                downloaded = get_stat('totalLength', completed) + get_stat('completedLength', active)
                speed = get_stat('downloadSpeed', active)
                total = frag_count * get_stat('totalLength', active, completed, average=True)
                if total < downloaded:
                    # Reported total is unreliable; treat it as unknown
                    total = None

                status.update({
                    'downloaded_bytes': int(downloaded),
                    'speed': speed,
                    'total_bytes': None if fragmented else total,
                    'total_bytes_estimate': total,
                    # total may be None (unknown) — avoid a TypeError here
                    'eta': None if total is None else (total - downloaded) / (speed or 1),
                    'fragment_index': min(frag_count, len(completed) + 1) if fragmented else None,
                    'elapsed': time.time() - started,
                })
                self._hook_progress(status, info_dict)

                if not active and len(completed) >= frag_count:
                    send_rpc('aria2.shutdown')
                    retval = p.wait()
                    break

                time.sleep(0.1)
                retval = p.poll()

        return '', p.stderr.read(), retval
432 | ||
433 | ||
class HttpieFD(ExternalFD):
    """Download via HTTPie's `http` command."""

    AVAILABLE_OPT = '--version'
    EXE_NAME = 'http'  # the executable is named "http", not "httpie"

    def _make_cmd(self, tmpfilename, info_dict):
        """Assemble the httpie command line for the given download."""
        # Use self.exe instead of the literal 'http' so that a user-supplied
        # executable path (stored on cls.exe by ExternalFD.available) is
        # respected, consistent with the other external downloaders
        cmd = [self.exe, '--download', '--output', tmpfilename, info_dict['url']]

        if info_dict.get('http_headers') is not None:
            # httpie takes headers as bare "Name:value" positional items
            for key, val in info_dict['http_headers'].items():
                cmd += [f'{key}:{val}']

        # httpie 3.1.0+ removes the Cookie header on redirect, so this should be safe for now. [1]
        # If we ever need cookie handling for redirects, we can export the cookiejar into a session. [2]
        # 1: https://github.com/httpie/httpie/security/advisories/GHSA-9w4w-cpc8-h2fq
        # 2: https://httpie.io/docs/cli/sessions
        cookie_header = self.ydl.cookiejar.get_cookie_header(info_dict['url'])
        if cookie_header:
            cmd += [f'Cookie:{cookie_header}']
        return cmd
453 | ||
454 | ||
class FFmpegFD(ExternalFD):
    """Downloader that delegates to ffmpeg/avconv (HLS, RTMP, DASH, merging)."""
    SUPPORTED_PROTOCOLS = ('http', 'https', 'ftp', 'ftps', 'm3u8', 'm3u8_native', 'rtsp', 'rtmp', 'rtmp_ffmpeg', 'mms', 'http_dash_segments')
    SUPPORTED_FEATURES = (Features.TO_STDOUT, Features.MULTIPLE_FORMATS)

    @classmethod
    def available(cls, path=None):
        # TODO: Fix path for ffmpeg
        # Fixme: This may be wrong when --ffmpeg-location is used
        return FFmpegPostProcessor().available

    def on_process_started(self, proc, stdin):
        """ Override this in subclasses """
        pass

    @classmethod
    def can_merge_formats(cls, info_dict, params):
        """Whether ffmpeg can directly merge the requested formats while downloading."""
        return (
            info_dict.get('requested_formats')
            and info_dict.get('protocol')
            and not params.get('allow_unplayable_formats')
            and 'no-direct-merge' not in params.get('compat_opts', [])
            and cls.can_download(info_dict))

    def _call_downloader(self, tmpfilename, info_dict):
        """Build the full ffmpeg command line, run it, and return the exit code."""
        ffpp = FFmpegPostProcessor(downloader=self)
        if not ffpp.available:
            self.report_error('m3u8 download detected but ffmpeg could not be found. Please install')
            return False
        ffpp.check_version()

        args = [ffpp.executable, '-y']

        # Forward yt-dlp's verbosity level to ffmpeg
        for log_level in ('quiet', 'verbose'):
            if self.params.get(log_level, False):
                args += ['-loglevel', log_level]
                break
        if not self.params.get('verbose'):
            args += ['-hide_banner']

        args += traverse_obj(info_dict, ('downloader_options', 'ffmpeg_args', ...))

        # These exists only for compatibility. Extractors should use
        # info_dict['downloader_options']['ffmpeg_args'] instead
        args += info_dict.get('_ffmpeg_args') or []
        seekable = info_dict.get('_seekable')
        if seekable is not None:
            # setting -seekable prevents ffmpeg from guessing if the server
            # supports seeking(by adding the header `Range: bytes=0-`), which
            # can cause problems in some cases
            # https://github.com/ytdl-org/youtube-dl/issues/11800#issuecomment-275037127
            # http://trac.ffmpeg.org/ticket/6125#comment:10
            args += ['-seekable', '1' if seekable else '0']

        env = None
        proxy = self.params.get('proxy')
        if proxy:
            if not re.match(r'^[\da-zA-Z]+://', proxy):
                proxy = 'http://%s' % proxy

            if proxy.startswith('socks'):
                self.report_warning(
                    '%s does not support SOCKS proxies. Downloading is likely to fail. '
                    'Consider adding --hls-prefer-native to your command.' % self.get_basename())

            # Since December 2015 ffmpeg supports -http_proxy option (see
            # http://git.videolan.org/?p=ffmpeg.git;a=commit;h=b4eb1f29ebddd60c41a2eb39f5af701e38e0d3fd)
            # We could switch to the following code if we are able to detect version properly
            # args += ['-http_proxy', proxy]
            env = os.environ.copy()
            env['HTTP_PROXY'] = proxy
            env['http_proxy'] = proxy

        protocol = info_dict.get('protocol')

        if protocol == 'rtmp':
            # Map extractor-supplied RTMP parameters onto ffmpeg's -rtmp_* options
            player_url = info_dict.get('player_url')
            page_url = info_dict.get('page_url')
            app = info_dict.get('app')
            play_path = info_dict.get('play_path')
            tc_url = info_dict.get('tc_url')
            flash_version = info_dict.get('flash_version')
            live = info_dict.get('rtmp_live', False)
            conn = info_dict.get('rtmp_conn')
            if player_url is not None:
                args += ['-rtmp_swfverify', player_url]
            if page_url is not None:
                args += ['-rtmp_pageurl', page_url]
            if app is not None:
                args += ['-rtmp_app', app]
            if play_path is not None:
                args += ['-rtmp_playpath', play_path]
            if tc_url is not None:
                args += ['-rtmp_tcurl', tc_url]
            if flash_version is not None:
                args += ['-rtmp_flashver', flash_version]
            if live:
                args += ['-rtmp_live', 'live']
            if isinstance(conn, list):
                for entry in conn:
                    args += ['-rtmp_conn', entry]
            elif isinstance(conn, str):
                args += ['-rtmp_conn', conn]

        start_time, end_time = info_dict.get('section_start') or 0, info_dict.get('section_end')

        # Per-input options: cookies, headers, and section boundaries,
        # emitted once per selected format before its -i
        selected_formats = info_dict.get('requested_formats') or [info_dict]
        for i, fmt in enumerate(selected_formats):
            is_http = re.match(r'^https?://', fmt['url'])
            cookies = self.ydl.cookiejar.get_cookies_for_url(fmt['url']) if is_http else []
            if cookies:
                args.extend(['-cookies', ''.join(
                    f'{cookie.name}={cookie.value}; path={cookie.path}; domain={cookie.domain};\r\n'
                    for cookie in cookies)])
            if fmt.get('http_headers') and is_http:
                # Trailing \r\n after each HTTP header is important to prevent warning from ffmpeg/avconv:
                # [http @ 00000000003d2fa0] No trailing CRLF found in HTTP header.
                args.extend(['-headers', ''.join(f'{key}: {val}\r\n' for key, val in fmt['http_headers'].items())])

            if start_time:
                args += ['-ss', str(start_time)]
            if end_time:
                args += ['-t', str(end_time - start_time)]

            args += self._configuration_args((f'_i{i + 1}', '_i')) + ['-i', fmt['url']]

        if not (start_time or end_time) or not self.params.get('force_keyframes_at_cuts'):
            # Stream copy (no re-encoding)
            args += ['-c', 'copy']

        if info_dict.get('requested_formats') or protocol == 'http_dash_segments':
            # Map each input to the stream that should end up in the output
            for i, fmt in enumerate(selected_formats):
                stream_number = fmt.get('manifest_stream_number', 0)
                args.extend(['-map', f'{i}:{stream_number}'])

        if self.params.get('test', False):
            args += ['-fs', str(self._TEST_FILE_SIZE)]

        # Choose the output container format
        ext = info_dict['ext']
        if protocol in ('m3u8', 'm3u8_native'):
            use_mpegts = (tmpfilename == '-') or self.params.get('hls_use_mpegts')
            if use_mpegts is None:
                # Default for live streams: mpegts survives interruption better
                use_mpegts = info_dict.get('is_live')
            if use_mpegts:
                args += ['-f', 'mpegts']
            else:
                args += ['-f', 'mp4']
                if (ffpp.basename == 'ffmpeg' and ffpp._features.get('needs_adtstoasc')) and (not info_dict.get('acodec') or info_dict['acodec'].split('.')[0] in ('aac', 'mp4a')):
                    args += ['-bsf:a', 'aac_adtstoasc']
        elif protocol == 'rtmp':
            args += ['-f', 'flv']
        elif ext == 'mp4' and tmpfilename == '-':
            # mp4 cannot be streamed to stdout; use mpegts instead
            args += ['-f', 'mpegts']
        elif ext == 'unknown_video':
            # Try to recover the extension from the target filename
            ext = determine_ext(remove_end(tmpfilename, '.part'))
            if ext == 'unknown_video':
                self.report_warning(
                    'The video format is unknown and cannot be downloaded by ffmpeg. '
                    'Explicitly set the extension in the filename to attempt download in that format')
            else:
                self.report_warning(f'The video format is unknown. Trying to download as {ext} according to the filename')
                args += ['-f', EXT_TO_OUT_FORMATS.get(ext, ext)]
        else:
            args += ['-f', EXT_TO_OUT_FORMATS.get(ext, ext)]

        args += traverse_obj(info_dict, ('downloader_options', 'ffmpeg_args_out', ...))

        args += self._configuration_args(('_o1', '_o', ''))

        args = [encodeArgument(opt) for opt in args]
        args.append(encodeFilename(ffpp._ffmpeg_filename_argument(tmpfilename), True))
        self._debug_cmd(args)

        piped = any(fmt['url'] in ('-', 'pipe:') for fmt in selected_formats)
        with Popen(args, stdin=subprocess.PIPE, env=env) as proc:
            if piped:
                self.on_process_started(proc, proc.stdin)
            try:
                retval = proc.wait()
            except BaseException as e:
                # subprocess.run would send the SIGKILL signal to ffmpeg and the
                # mp4 file couldn't be played, but if we ask ffmpeg to quit it
                # produces a file that is playable (this is mostly useful for live
                # streams). Note that Windows is not affected and produces playable
                # files (see https://github.com/ytdl-org/youtube-dl/issues/8300).
                if isinstance(e, KeyboardInterrupt) and sys.platform != 'win32' and not piped:
                    proc.communicate_or_kill(b'q')
                else:
                    proc.kill(timeout=None)
                raise
            return retval
644 | ||
645 | ||
class AVconvFD(FFmpegFD):
    # Identical to FFmpegFD; only the basename derived from the class name
    # ("avconv") differs, which changes the executable looked up
    pass
648 | ||
649 | ||
# Registry mapping downloader basename ("curl", "wget", ...) -> class.
# Picks up every *FD class defined in this module except the base classes.
_BY_NAME = {
    klass.get_basename(): klass
    for name, klass in globals().items()
    if name.endswith('FD') and name not in ('ExternalFD', 'FragmentFD')
}
655 | ||
656 | ||
def list_external_downloaders():
    """Return the names of all supported external downloaders, sorted."""
    return sorted(_BY_NAME)
659 | ||
660 | ||
def get_external_downloader(external_downloader):
    """ Given the name of the executable, see whether we support the given downloader """
    basename = os.path.splitext(os.path.basename(external_downloader))[0]
    downloader = _BY_NAME.get(basename)
    if downloader is not None:
        return downloader
    # Fall back to matching the executable name against EXE_NAME
    for klass in _BY_NAME.values():
        if klass.EXE_NAME in basename:
            return klass
    return None