#!/usr/bin/env python3
# Allow direct execution
import os
import sys
import unittest

sys.path.insert(0, os.path.dirname(os.path.dirname(os.path.abspath(__file__))))

import hashlib
import http.client
import json
import socket
import urllib.error

from test.helper import (
    assertGreaterEqual,
    expect_info_dict,
    expect_warnings,
    get_params,
    gettestcases,
    report_warning,
    try_rm,
)

import yt_dlp.YoutubeDL  # isort: split
from yt_dlp.compat import compat_HTTPError
from yt_dlp.extractor import get_info_extractor
from yt_dlp.utils import (
    DownloadError,
    ExtractorError,
    UnavailableVideoError,
    format_bytes,
)

# Number of attempts before a network-flaky test is skipped.
# NOTE(review): constant fell in an extraction gap; value taken from upstream — confirm.
RETRIES = 3
39 class YoutubeDL(yt_dlp
.YoutubeDL
):
40 def __init__(self
, *args
, **kwargs
):
41 self
.to_stderr
= self
.to_screen
42 self
.processed_info_dicts
= []
43 super().__init
__(*args
, **kwargs
)
45 def report_warning(self
, message
, *args
, **kwargs
):
46 # Don't accept warnings during tests
47 raise ExtractorError(message
)
49 def process_info(self
, info_dict
):
50 self
.processed_info_dicts
.append(info_dict
.copy())
51 return super().process_info(info_dict
)
55 with open(fn
, 'rb') as f
:
56 return hashlib
.md5(f
.read()).hexdigest()
63 class TestDownload(unittest
.TestCase
):
64 # Parallel testing in nosetests. See
65 # http://nose.readthedocs.org/en/latest/doc_tests/test_multiprocess/multiprocess.html
66 _multiprocess_shared_
= True
73 """Identify each test with the `add_ie` attribute, if available."""
76 """From 2.7's unittest; 2.6 had _strclass so we can't import it."""
77 return f
'{cls.__module__}.{cls.__name__}'
79 add_ie
= getattr(self
, self
._testMethodName
).add_ie
80 return '%s (%s)%s:' % (self
._testMethodName
,
81 strclass(self
.__class
__),
82 ' [%s]' % add_ie
if add_ie
else '')
87 # Dynamically generate tests
90 def generator(test_case
, tname
):
92 def test_template(self
):
93 if self
.COMPLETED_TESTS
.get(tname
):
95 self
.COMPLETED_TESTS
[tname
] = True
96 ie
= yt_dlp
.extractor
.get_info_extractor(test_case
['name'])()
97 other_ies
= [get_info_extractor(ie_key
)() for ie_key
in test_case
.get('add_ie', [])]
98 is_playlist
= any(k
.startswith('playlist') for k
in test_case
)
99 test_cases
= test_case
.get(
100 'playlist', [] if is_playlist
else [test_case
])
102 def print_skipping(reason
):
103 print('Skipping %s: %s' % (test_case
['name'], reason
))
104 self
.skipTest(reason
)
107 print_skipping('IE marked as not _WORKING')
109 for tc
in test_cases
:
110 info_dict
= tc
.get('info_dict', {})
111 params
= tc
.get('params', {})
112 if not info_dict
.get('id'):
113 raise Exception('Test definition incorrect. \'id\' key is not present')
114 elif not info_dict
.get('ext'):
115 if params
.get('skip_download') and params
.get('ignore_no_formats_error'):
117 raise Exception('Test definition incorrect. The output file cannot be known. \'ext\' key is not present')
119 if 'skip' in test_case
:
120 print_skipping(test_case
['skip'])
122 for other_ie
in other_ies
:
123 if not other_ie
.working():
124 print_skipping('test depends on %sIE, marked as not WORKING' % other_ie
.ie_key())
126 params
= get_params(test_case
.get('params', {}))
127 params
['outtmpl'] = tname
+ '_' + params
['outtmpl']
128 if is_playlist
and 'playlist' not in test_case
:
129 params
.setdefault('extract_flat', 'in_playlist')
130 params
.setdefault('playlistend', test_case
.get('playlist_mincount'))
131 params
.setdefault('skip_download', True)
133 ydl
= YoutubeDL(params
, auto_init
=False)
134 ydl
.add_default_info_extractors()
135 finished_hook_called
= set()
138 if status
['status'] == 'finished':
139 finished_hook_called
.add(status
['filename'])
140 ydl
.add_progress_hook(_hook
)
141 expect_warnings(ydl
, test_case
.get('expected_warnings', []))
143 def get_tc_filename(tc
):
144 return ydl
.prepare_filename(dict(tc
.get('info_dict', {})))
148 def try_rm_tcs_files(tcs
=None):
152 tc_filename
= get_tc_filename(tc
)
154 try_rm(tc_filename
+ '.part')
155 try_rm(os
.path
.splitext(tc_filename
)[0] + '.info.json')
161 # We're not using .download here since that is just a shim
162 # for outside error handling, and returns the exit code
163 # instead of the result dict.
164 res_dict
= ydl
.extract_info(
166 force_generic_extractor
=params
.get('force_generic_extractor', False))
167 except (DownloadError
, ExtractorError
) as err
:
168 # Check if the exception is not a network related one
169 if not err
.exc_info
[0] in (urllib
.error
.URLError
, socket
.timeout
, UnavailableVideoError
, http
.client
.BadStatusLine
) or (err
.exc_info
[0] == compat_HTTPError
and err
.exc_info
[1].code
== 503):
172 if try_num
== RETRIES
:
173 report_warning('%s failed due to network errors, skipping...' % tname
)
176 print(f
'Retrying: {try_num} failed tries\n\n##########\n\n')
183 self
.assertTrue(res_dict
['_type'] in ['playlist', 'multi_video'])
184 self
.assertTrue('entries' in res_dict
)
185 expect_info_dict(self
, res_dict
, test_case
.get('info_dict', {}))
187 if 'playlist_mincount' in test_case
:
190 len(res_dict
['entries']),
191 test_case
['playlist_mincount'],
192 'Expected at least %d in playlist %s, but got only %d' % (
193 test_case
['playlist_mincount'], test_case
['url'],
194 len(res_dict
['entries'])))
195 if 'playlist_count' in test_case
:
197 len(res_dict
['entries']),
198 test_case
['playlist_count'],
199 'Expected %d entries in playlist %s, but got %d.' % (
200 test_case
['playlist_count'],
202 len(res_dict
['entries']),
204 if 'playlist_duration_sum' in test_case
:
205 got_duration
= sum(e
['duration'] for e
in res_dict
['entries'])
207 test_case
['playlist_duration_sum'], got_duration
)
209 # Generalize both playlists and single videos to unified format for
211 if 'entries' not in res_dict
:
212 res_dict
['entries'] = [res_dict
]
214 for tc_num
, tc
in enumerate(test_cases
):
215 tc_res_dict
= res_dict
['entries'][tc_num
]
216 # First, check test cases' data against extracted data alone
217 expect_info_dict(self
, tc_res_dict
, tc
.get('info_dict', {}))
218 # Now, check downloaded file consistency
219 tc_filename
= get_tc_filename(tc
)
220 if not test_case
.get('params', {}).get('skip_download', False):
221 self
.assertTrue(os
.path
.exists(tc_filename
), msg
='Missing file ' + tc_filename
)
222 self
.assertTrue(tc_filename
in finished_hook_called
)
223 expected_minsize
= tc
.get('file_minsize', 10000)
224 if expected_minsize
is not None:
225 if params
.get('test'):
226 expected_minsize
= max(expected_minsize
, 10000)
227 got_fsize
= os
.path
.getsize(tc_filename
)
229 self
, got_fsize
, expected_minsize
,
230 'Expected %s to be at least %s, but it\'s only %s ' %
231 (tc_filename
, format_bytes(expected_minsize
),
232 format_bytes(got_fsize
)))
234 md5_for_file
= _file_md5(tc_filename
)
235 self
.assertEqual(tc
['md5'], md5_for_file
)
236 # Finally, check test cases' data again but this time against
237 # extracted data from info JSON file written during processing
238 info_json_fn
= os
.path
.splitext(tc_filename
)[0] + '.info.json'
240 os
.path
.exists(info_json_fn
),
241 'Missing info file %s' % info_json_fn
)
242 with open(info_json_fn
, encoding
='utf-8') as infof
:
243 info_dict
= json
.load(infof
)
244 expect_info_dict(self
, info_dict
, tc
.get('info_dict', {}))
247 if is_playlist
and res_dict
is not None and res_dict
.get('entries'):
248 # Remove all other files that may have been extracted if the
249 # extractor returns full results even with extract_flat
250 res_tcs
= [{'info_dict': e}
for e
in res_dict
['entries']]
251 try_rm_tcs_files(res_tcs
)
256 # And add them to TestDownload
258 for test_case
in defs
:
259 name
= test_case
['name']
260 i
= tests_counter
.get(name
, 0)
261 tests_counter
[name
] = i
+ 1
262 tname
= f
'test_{name}_{i}' if i
else f
'test_{name}'
263 test_method
= generator(test_case
, tname
)
264 test_method
.__name
__ = str(tname
)
265 ie_list
= test_case
.get('add_ie')
266 test_method
.add_ie
= ie_list
and ','.join(ie_list
)
267 setattr(TestDownload
, test_method
.__name
__, test_method
)
271 def batch_generator(name
, num_tests
):
273 def test_template(self
):
274 for i
in range(num_tests
):
275 getattr(self
, f
'test_{name}_{i}' if i
else f
'test_{name}')()
280 for name
, num_tests
in tests_counter
.items():
281 test_method
= batch_generator(name
, num_tests
)
282 test_method
.__name
__ = f
'test_{name}_all'
283 test_method
.add_ie
= ''
284 setattr(TestDownload
, test_method
.__name
__, test_method
)
288 if __name__
== '__main__':