]>
Commit | Line | Data |
---|---|---|
fd5ff020 FV |
1 | #!/usr/bin/env python |
2 | ||
5c892b0b | 3 | import errno |
efe8902f | 4 | import hashlib |
fd5ff020 | 5 | import io |
efe8902f | 6 | import os |
7f60b5aa | 7 | import json |
cdab8aa3 PH |
8 | import unittest |
9 | import sys | |
0eaf520d | 10 | import hashlib |
6b3aef80 | 11 | import socket |
fd5ff020 FV |
12 | |
13 | # Allow direct execution | |
14 | sys.path.append(os.path.dirname(os.path.dirname(os.path.abspath(__file__)))) | |
cdab8aa3 | 15 | |
fd5ff020 FV |
16 | import youtube_dl.FileDownloader |
17 | import youtube_dl.InfoExtractors | |
18 | from youtube_dl.utils import * | |
1535ac2a | 19 | |
# Path of the JSON file holding the per-extractor test-case definitions
# (presumably url / expected file / md5 entries consumed below).
DEF_FILE = os.path.join(os.path.dirname(os.path.abspath(__file__)), 'tests.json')
# Path of the JSON file holding the shared FileDownloader parameters.
PARAMETERS_FILE = os.path.join(os.path.dirname(os.path.abspath(__file__)), "parameters.json")

# General configuration (from __init__, not very elegant...)
# Install a cookie-aware, proxy-aware urllib opener globally so the info
# extractors under test behave as in a real youtube_dl run.  The compat_*
# names and YoutubeDLHandler come from `from youtube_dl.utils import *` above.
jar = compat_cookiejar.CookieJar()
cookie_processor = compat_urllib_request.HTTPCookieProcessor(jar)
proxy_handler = compat_urllib_request.ProxyHandler()
opener = compat_urllib_request.build_opener(proxy_handler, cookie_processor, YoutubeDLHandler())
compat_urllib_request.install_opener(opener)
fd5ff020 | 29 | |
5c892b0b PH |
30 | def _try_rm(filename): |
31 | """ Remove a file if it exists """ | |
32 | try: | |
33 | os.remove(filename) | |
34 | except OSError as ose: | |
35 | if ose.errno != errno.ENOENT: | |
36 | raise | |
37 | ||
fd5ff020 FV |
class FileDownloader(youtube_dl.FileDownloader):
    """FileDownloader variant for tests: keeps all output on one stream and
    records every info_dict it processes for later inspection."""

    def __init__(self, *args, **kwargs):
        # Route stderr output through to_screen so test output is unified.
        self.to_stderr = self.to_screen
        # Filled in by process_info(); tests read this afterwards.
        self.processed_info_dicts = []
        youtube_dl.FileDownloader.__init__(self, *args, **kwargs)

    def process_info(self, info_dict):
        self.processed_info_dicts.append(info_dict)
        return youtube_dl.FileDownloader.process_info(self, info_dict)
1535ac2a | 46 | |
fd5ff020 FV |
47 | def _file_md5(fn): |
48 | with open(fn, 'rb') as f: | |
49 | return hashlib.md5(f.read()).hexdigest() | |
50 | ||
# Read the test-case definitions and the shared downloader parameters from
# the JSON files that ship next to this module.
with io.open(DEF_FILE, encoding='utf-8') as _fh:
    defs = json.load(_fh)
with io.open(PARAMETERS_FILE, encoding='utf-8') as _fh:
    parameters = json.load(_fh)
1535ac2a | 55 | |
0eaf520d | 56 | |
class TestDownload(unittest.TestCase):
    """Container class; the actual test methods are generated below and
    attached via setattr()."""

    def setUp(self):
        # Expose the module-level configuration to each test instance.
        self.defs = defs
        self.parameters = parameters
61 | ||
911ee27e | 62 | ### Dynamically generate tests |
5d01a647 PH |
def generator(test_case):
    """Build a unittest method that runs *test_case*: download the URL,
    then check file presence, optional file MD5, and expected info_dict
    fields (values prefixed 'md5:' are compared by hash)."""

    def test_template(self):
        ie = getattr(youtube_dl.InfoExtractors, test_case['name'] + 'IE')
        if not ie._WORKING:
            print('Skipping: IE marked as not _WORKING')
            return
        if 'playlist' not in test_case and not test_case['file']:
            print('Skipping: No output file specified')
            return
        if 'skip' in test_case:
            print('Skipping: {0}'.format(test_case['skip']))
            return

        # Start from the shared parameters, overridden per test case.
        params = self.parameters.copy()
        params.update(test_case.get('params', {}))

        fd = FileDownloader(params)
        fd.add_info_extractor(ie())
        for ien in test_case.get('add_ie', []):
            fd.add_info_extractor(getattr(youtube_dl.InfoExtractors, ien + 'IE')())

        # A playlist case lists its expected files; a plain case is its own.
        test_cases = test_case.get('playlist', [test_case])
        for tc in test_cases:
            _try_rm(tc['file'])
            _try_rm(tc['file'] + '.info.json')
        try:
            fd.download([test_case['url']])

            for tc in test_cases:
                self.assertTrue(os.path.exists(tc['file']))
                self.assertTrue(os.path.exists(tc['file'] + '.info.json'))
                if 'md5' in tc:
                    md5_for_file = _file_md5(tc['file'])
                    self.assertEqual(md5_for_file, tc['md5'])
                with io.open(tc['file'] + '.info.json', encoding='utf-8') as infof:
                    info_dict = json.load(infof)
                for (info_field, value) in tc.get('info_dict', {}).items():
                    if value.startswith('md5:'):
                        # BUG FIX: 'md5:' is four characters long; the old
                        # slice value[3:] kept the ':' so the comparison could
                        # never succeed.  Also encode to bytes: hashlib.md5()
                        # rejects str on Python 3.
                        field_value = info_dict.get(info_field, '')
                        md5_info_value = hashlib.md5(
                            field_value.encode('utf-8')).hexdigest()
                        self.assertEqual(value[len('md5:'):], md5_info_value)
                    else:
                        self.assertEqual(value, info_dict.get(info_field))
        finally:
            # Always clean up downloaded artifacts, even on failure.
            for tc in test_cases:
                _try_rm(tc['file'])
                _try_rm(tc['file'] + '.info.json')

    return test_template
fd5ff020 | 112 | |
5d01a647 | 113 | ### And add them to TestDownload |
### Attach one generated test method per definition to TestDownload
for test_case in defs:
    method_name = "test_{0}".format(test_case["name"])
    test_method = generator(test_case)
    test_method.__name__ = method_name
    setattr(TestDownload, method_name, test_method)
    # Keep the module namespace clean between iterations.
    del test_method, method_name


if __name__ == '__main__':
    unittest.main()