]>
jfr.im git - yt-dlp.git/blob - test/test_download.py
# Allow direct execution: make the repository root (two levels above this
# test file) importable before anything from the project is imported.
_repo_root = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
sys.path.insert(0, _repo_root)
15 from test
.helper
import (
27 import yt_dlp
.YoutubeDL
# isort: split
28 from yt_dlp
.extractor
import get_info_extractor
29 from yt_dlp
.networking
.exceptions
import HTTPError
, TransportError
30 from yt_dlp
.utils
import (
33 UnavailableVideoError
,
class YoutubeDL(yt_dlp.YoutubeDL):
    """yt_dlp.YoutubeDL subclass instrumented for the download tests.

    Any warning is escalated to a hard error, and every info_dict that
    reaches process_info() is recorded for later inspection by the tests.
    """

    def __init__(self, *args, **kwargs):
        # Snapshots of every info_dict handed to process_info().
        self.processed_info_dicts = []
        # Send stderr output through to_screen so all output goes one way.
        self.to_stderr = self.to_screen
        super().__init__(*args, **kwargs)

    def report_warning(self, message, *args, **kwargs):
        # Don't accept warnings during tests -- turn them into errors.
        raise ExtractorError(message)

    def process_info(self, info_dict):
        # Record a copy so later mutation of info_dict doesn't affect it.
        self.processed_info_dicts.append(dict(info_dict))
        return super().process_info(info_dict)
58 with open(fn
, 'rb') as f
:
59 return hashlib
.md5(f
.read()).hexdigest()
# Test-case definitions collected from the extractors.
normal_test_cases = gettestcases()
webpage_test_cases = getwebpagetestcases()

# tests_counter[extractor_name][label] -> number of tests injected for that
# (extractor, label) pair; used below to build unique test method names.
tests_counter = collections.defaultdict(collections.Counter)
68 class TestDownload(unittest
.TestCase
):
# NOTE(review): garbled capture -- statements are split across physical lines
# and the original lines 72-77 are missing entirely (including the 'def'
# header of the method whose body appears below).  Only comments were added
# here; the captured text is unchanged.
69 # Parallel testing in nosetests. See
70 # http://nose.readthedocs.org/en/latest/doc_tests/test_multiprocess/multiprocess.html
71 _multiprocess_shared_
= True
# The docstring and return statement below belong to a method whose 'def'
# line is missing from this capture.  From the body it builds a per-test
# label from the test method name, the class, and the method's add_ie
# attribute -- presumably a __str__-style describer; confirm against the
# upstream file before relying on the method name.
78 """Identify each test with the `add_ie` attribute, if available."""
79 cls
, add_ie
= type(self
), getattr(self
, self
._testMethodName
).add_ie
80 return f
'{self._testMethodName} ({cls.__module__}.{cls.__name__}){f" [{add_ie}]" if add_ie else ""}:'
83 # Dynamically generate tests
# NOTE(review): this whole region is a garbled capture of a test-generator
# function.  Statements are split across physical lines, the original file's
# line numbers are fused into the text, and many original lines are missing
# outright (e.g. 88, 95, 98-100, 102, 105, 112, 114, 117, 121, 129, 133-134,
# 139, 142-144, 148, 150, 153-155, 157-159, 161, 164-168, 173, ...).  The
# captured text below is kept byte-identical; only these review comments
# were added.  Do NOT treat this capture as runnable -- restore the function
# from the upstream file.
85 def generator(test_case
, tname
):
86 def test_template(self
):
# Skip if this test already ran, then mark it completed.
87 if self
.COMPLETED_TESTS
.get(tname
):
89 self
.COMPLETED_TESTS
[tname
] = True
# Instantiate the extractor under test plus any extractors it depends on.
90 ie
= yt_dlp
.extractor
.get_info_extractor(test_case
['name'])()
91 other_ies
= [get_info_extractor(ie_key
)() for ie_key
in test_case
.get('add_ie', [])]
92 is_playlist
= any(k
.startswith('playlist') for k
in test_case
)
93 test_cases
= test_case
.get(
94 'playlist', [] if is_playlist
else [test_case
])
# Local helper: report why a test case is being skipped.
96 def print_skipping(reason
):
97 print('Skipping %s: %s' % (test_case
['name'], reason
))
101 print_skipping('IE marked as not _WORKING')
# Validate each test-case definition before running anything.
103 for tc
in test_cases
:
104 if tc
.get('expected_exception'):
106 info_dict
= tc
.get('info_dict', {})
107 params
= tc
.get('params', {})
108 if not info_dict
.get('id'):
109 raise Exception(f
'Test {tname} definition incorrect - "id" key is not present')
110 elif not info_dict
.get('ext') and info_dict
.get('_type', 'video') == 'video':
111 if params
.get('skip_download') and params
.get('ignore_no_formats_error'):
113 raise Exception(f
'Test {tname} definition incorrect - "ext" key must be present to define the output file')
# Honor explicit 'skip' markers and non-working dependency extractors.
115 if 'skip' in test_case
:
116 print_skipping(test_case
['skip'])
118 for other_ie
in other_ies
:
119 if not other_ie
.working():
120 print_skipping('test depends on %sIE, marked as not WORKING' % other_ie
.ie_key())
# Build the downloader parameters; prefix output templates with the test
# name, and default playlist tests to flat, download-skipped extraction.
122 params
= get_params(test_case
.get('params', {}))
123 params
['outtmpl'] = tname
+ '_' + params
['outtmpl']
124 if is_playlist
and 'playlist' not in test_case
:
125 params
.setdefault('extract_flat', 'in_playlist')
126 params
.setdefault('playlistend', test_case
.get(
127 'playlist_mincount', test_case
.get('playlist_count', -2) + 1))
128 params
.setdefault('skip_download', True)
130 ydl
= YoutubeDL(params
, auto_init
=False)
131 ydl
.add_default_info_extractors()
132 finished_hook_called
= set()
# Progress hook: record filenames whose download finished.
# NOTE(review): the enclosing 'def _hook(status):' line (the hook is
# registered as '_hook' below) is missing from this capture.
135 if status
['status'] == 'finished':
136 finished_hook_called
.add(status
['filename'])
137 ydl
.add_progress_hook(_hook
)
138 expect_warnings(ydl
, test_case
.get('expected_warnings', []))
# Helper: compute the output filename a given test case would produce.
140 def get_tc_filename(tc
):
141 return ydl
.prepare_filename(dict(tc
.get('info_dict', {})))
# Helper: check whether an exception (or one in its exc_info chain)
# matches the test case's 'expected_exception' class name.
145 def match_exception(err
):
146 expected_exception
= test_case
.get('expected_exception')
147 if not expected_exception
:
149 if err
.__class
__.__name
__ == expected_exception
:
151 for exc
in err
.exc_info
:
152 if exc
.__class
__.__name
__ == expected_exception
:
# Helper: remove files produced for the given test cases (partial
# downloads and the written .info.json sidecars).
156 def try_rm_tcs_files(tcs
=None):
160 tc_filename
= get_tc_filename(tc
)
162 try_rm(tc_filename
+ '.part')
163 try_rm(os
.path
.splitext(tc_filename
)[0] + '.info.json')
# Extraction attempt with retry-on-network-error handling.
# NOTE(review): the enclosing 'try:' and the retry-loop header are among
# the lines missing from this capture.
169 # We're not using .download here since that is just a shim
170 # for outside error handling, and returns the exit code
171 # instead of the result dict.
172 res_dict
= ydl
.extract_info(
174 force_generic_extractor
=params
.get('force_generic_extractor', False))
175 except (DownloadError
, ExtractorError
) as err
:
176 # Check if the exception is not a network related one
177 if not isinstance(err
.exc_info
[1], (TransportError
, UnavailableVideoError
)) or (isinstance(err
.exc_info
[1], HTTPError
) and err
.exc_info
[1].status
== 503):
178 if match_exception(err
):
180 err
.msg
= f
'{getattr(err, "msg", err)} ({tname})'
183 if try_num
== RETRIES
:
184 report_warning('%s failed due to network errors, skipping...' % tname
)
187 print(f
'Retrying: {try_num} failed tries\n\n##########\n\n')
190 except YoutubeDLError
as err
:
191 if match_exception(err
):
# Playlist-level assertions on the extraction result.
198 self
.assertTrue(res_dict
['_type'] in ['playlist', 'multi_video'])
199 self
.assertTrue('entries' in res_dict
)
200 expect_info_dict(self
, res_dict
, test_case
.get('info_dict', {}))
# NOTE(review): the assertion call heads (e.g. original lines 203-204,
# 211, 221) are missing below -- only their argument lists survived.
202 if 'playlist_mincount' in test_case
:
205 len(res_dict
['entries']),
206 test_case
['playlist_mincount'],
207 'Expected at least %d in playlist %s, but got only %d' % (
208 test_case
['playlist_mincount'], test_case
['url'],
209 len(res_dict
['entries'])))
210 if 'playlist_count' in test_case
:
212 len(res_dict
['entries']),
213 test_case
['playlist_count'],
214 'Expected %d entries in playlist %s, but got %d.' % (
215 test_case
['playlist_count'],
217 len(res_dict
['entries']),
219 if 'playlist_duration_sum' in test_case
:
220 got_duration
= sum(e
['duration'] for e
in res_dict
['entries'])
222 test_case
['playlist_duration_sum'], got_duration
)
224 # Generalize both playlists and single videos to unified format for
226 if 'entries' not in res_dict
:
227 res_dict
['entries'] = [res_dict
]
# Per-test-case checks: info_dict expectations, downloaded file
# presence/size/MD5, and the written .info.json contents.
229 for tc_num
, tc
in enumerate(test_cases
):
230 tc_res_dict
= res_dict
['entries'][tc_num
]
231 # First, check test cases' data against extracted data alone
232 expect_info_dict(self
, tc_res_dict
, tc
.get('info_dict', {}))
233 if tc_res_dict
.get('_type', 'video') != 'video':
235 # Now, check downloaded file consistency
236 tc_filename
= get_tc_filename(tc
)
237 if not test_case
.get('params', {}).get('skip_download', False):
238 self
.assertTrue(os
.path
.exists(tc_filename
), msg
='Missing file ' + tc_filename
)
239 self
.assertTrue(tc_filename
in finished_hook_called
)
240 expected_minsize
= tc
.get('file_minsize', 10000)
241 if expected_minsize
is not None:
242 if params
.get('test'):
243 expected_minsize
= max(expected_minsize
, 10000)
244 got_fsize
= os
.path
.getsize(tc_filename
)
246 self
, got_fsize
, expected_minsize
,
247 'Expected %s to be at least %s, but it\'s only %s ' %
248 (tc_filename
, format_bytes(expected_minsize
),
249 format_bytes(got_fsize
)))
251 md5_for_file
= _file_md5(tc_filename
)
252 self
.assertEqual(tc
['md5'], md5_for_file
)
253 # Finally, check test cases' data again but this time against
254 # extracted data from info JSON file written during processing
255 info_json_fn
= os
.path
.splitext(tc_filename
)[0] + '.info.json'
257 os
.path
.exists(info_json_fn
),
258 'Missing info file %s' % info_json_fn
)
259 with open(info_json_fn
, encoding
='utf-8') as infof
:
260 info_dict
= json
.load(infof
)
261 expect_info_dict(self
, info_dict
, tc
.get('info_dict', {}))
# Cleanup: remove files for all extracted entries when the extractor
# returned full playlist results.  NOTE(review): the enclosing cleanup
# clause (original lines 262-263) is missing from this capture.
264 if is_playlist
and res_dict
is not None and res_dict
.get('entries'):
265 # Remove all other files that may have been extracted if the
266 # extractor returns full results even with extract_flat
267 res_tcs
= [{'info_dict': e}
for e
in res_dict
['entries']]
268 try_rm_tcs_files(res_tcs
)
# And add them to TestDownload
def inject_tests(test_cases, label=''):
    """Attach one generated test method per entry of *test_cases* to TestDownload.

    Method names are built via join_nonempty from 'test', the extractor
    name, *label* and a per-(name, label) counter, so repeated cases for
    the same extractor get distinct names.
    """
    for case in test_cases:
        name = case['name']
        tname = join_nonempty('test', name, label, tests_counter[name][label], delim='_')
        tests_counter[name][label] += 1

        method = generator(case, tname)
        method.__name__ = tname
        method.add_ie = ','.join(case.get('add_ie', []))
        setattr(TestDownload, tname, method)
# Register the standard extractor test cases.
inject_tests(normal_test_cases)

# TODO: disable redirection to the IE to ensure we are actually testing the webpage extraction
inject_tests(webpage_test_cases, 'webpage')
def batch_generator(name):
    """Return a test method that runs every injected test for extractor *name*.

    NOTE(review): the capture of this function was garbled -- the 'try:'
    line and the trailing 'return test_template' were missing.  Both are
    restored here: the visible 'except unittest.SkipTest' clause requires
    an enclosing 'try:', and the call sites below assign this function's
    result and set attributes on it, so it must return the inner function.
    """
    def test_template(self):
        # Run each generated test for this extractor, in injection order.
        for label, num_tests in tests_counter[name].items():
            for i in range(num_tests):
                test_name = join_nonempty('test', name, label, i, delim='_')
                try:
                    getattr(self, test_name)()
                except unittest.SkipTest:
                    # A skipped sub-test must not abort the whole batch.
                    print(f'Skipped {test_name}')

    return test_template
# Additionally expose a 'test_<name>_all' aggregate method per extractor,
# which runs every injected test for that extractor in sequence.
for name in tests_counter:
    batch_method = batch_generator(name)
    batch_method.__name__ = f'test_{name}_all'
    batch_method.add_ie = ''
    setattr(TestDownload, batch_method.__name__, batch_method)
313 if __name__
== '__main__':